﻿using System;
using System.Drawing;
using System.IO;
using System.Runtime.Serialization;
using System.Runtime.Serialization.Formatters.Binary;
using System.Windows.Forms;
using NeuronDotNet.Controls;
using NeuronDotNet.Core;
using NeuronDotNet.Core.Backpropagation;
using System.Collections.Generic;
using NeuronDotNet.Samples.OCR.layoutsstategies;
using NeuronDotNet.Samples.OCR;

namespace NeuronDotNet.Samples.CharacterRecognition
{
    public partial class MainForm : Form
    {
        // Pattern set for the letter currently selected in the combo box; null until a selection is made.
        Alphabet currentLetter = null;
        // Shared singleton holding the network/training parameters (see ctor).
        TrainConfiguration config;

        // Display names for the 26 pairwise letter classifiers used by RecognizeLetter.
        private static readonly string[] letters = 
        {
            "A", "B", "C", "D", "E", "F", "G", "H", "I",
            "J", "K", "L", "M", "N", "O", "P", "Q", "R",
            "S", "T", "U", "V", "W", "X", "Y", "Z"
        };

        // Items shown in the combo box for the word-length mode (lengths 1..5).
        private static readonly string[] counts = 
        {
            "1","2","3","4","5"
        };
        
        //length of the longest supported word
        private static int maxWordLength = counts.Length; 

        /// <summary>
        /// Initializes the shared training configuration, then builds the
        /// designer-generated UI.
        /// </summary>
        public MainForm()
        {
            config = TrainConfiguration.GetInstance();
            config.setConfiguration(
                400,   // input layer size (20x20 compressed image vector)
                4,     // hidden (inner) layer size
                2,     // output layer size (two-class comparison)
                200,   // default total training cycles (fallback for bad input)
                35,    // starting cycles for the first training attempt
                0.4);  // acceptable output difference between the two classes
            InitializeComponent();
        }

        // Combo-box selection handler: loads the pattern set for the selected
        // item and refreshes the navigation labels/buttons.
        // NOTE(review): "cboAplhabet" is a designer control name (typo preserved).
        private void LoadLetter(object sender, EventArgs e)
        {
            currentLetter = Alphabet.GetLetter(cboAplhabet.SelectedIndex);
            SetLabels();
        }

        // Adds the drawing in the training picture box as a new instance of the
        // currently selected pattern set, then refreshes the UI.
        private void AddInstance(object sender, EventArgs e)
        {
            currentLetter.AddInstance(picTraining.Letter);
            SetLabels();
        }

        // Removes the instance currently shown in the training picture box.
        private void RemoveInstance(object sender, EventArgs e)
        {
            currentLetter.RemoveCurrentInstance();
            SetLabels();
        }

        /// <summary>
        /// Removes every stored instance of the current pattern set after a
        /// confirmation prompt.
        /// </summary>
        private void RemoveAll(object sender, EventArgs e)
        {
            // YesNo instead of YesNoCancel: only Yes was ever acted on, so the
            // extra Cancel button behaved identically to No.
            if (MessageBox.Show("Are you sure?", "Confirm Removal", MessageBoxButtons.YesNo, MessageBoxIcon.Question) == DialogResult.Yes)
            {
                currentLetter.RemoveAll();
                SetLabels();
            }
        }

        // Clears the training picture box and disables the buttons that only
        // make sense while an existing instance is displayed.
        private void ClearImage(object sender, EventArgs e)
        {
            btnClearImage.Enabled = false;
            btnAdd.Enabled = false;
            btnRemove.Enabled = false;
            picTraining.Letter = new Letter();
            picTraining.Invalidate();
            lblOf.Text = "New";
        }

        // Shows the next stored instance of the current pattern set.
        private void MoveNext(object sender, EventArgs e)
        {
            currentLetter.MoveNext();
            SetLabels();
        }

        // Shows the previous stored instance of the current pattern set.
        private void MovePrevious(object sender, EventArgs e)
        {
            currentLetter.MovePrevious();
            SetLabels();
        }

        /// <summary>
        /// Synchronizes the "x of y" label, the navigation/edit buttons and the
        /// training picture box with the state of the current pattern set.
        /// </summary>
        private void SetLabels()
        {
            bool hasInstances = this.currentLetter.InstancesCount > 0;

            // Every navigation/edit button is enabled exactly when there is at
            // least one stored instance to operate on.
            btnPrev.Enabled = hasInstances;
            btnNext.Enabled = hasInstances;
            btnClearImage.Enabled = hasInstances;
            btnRemoveAll.Enabled = hasInstances;
            btnRemove.Enabled = hasInstances;
            btnAdd.Enabled = hasInstances;

            if (hasInstances)
            {
                lblOf.Text = (currentLetter.InstanceIndex + 1) + " of " + currentLetter.InstancesCount.ToString();
                picTraining.Letter = currentLetter.CurrentInstance;
            }
            else
            {
                lblOf.Text = "0 of 0";
                picTraining.Letter = new Letter();
            }
            picTraining.Invalidate();
        }

        // Resets the recognition area: blanks the drawing, the compressed
        // preview, and the result labels.
        private void Clear(object sender, EventArgs e)
        {
            picCompressed.Invalidate();
            picRecognition.Letter = new Letter();
            picRecognition.Invalidate();
            lblResult.Visible = false;
            lblPreResult.Visible = false;
        }

        // The user started drawing a new training pattern: allow adding it,
        // and mark the picture as unsaved ("New").
        private void TrainingPicMousedown(object sender, MouseEventArgs e)
        {
            btnAdd.Enabled = true;
            btnRemove.Enabled = false;
            btnClearImage.Enabled = true;
            lblOf.Text = "New";
        }

        // Hide any stale recognition result as soon as the user draws again.
        private void RecognitionPicMousedown(object sender, MouseEventArgs e)
        {
            lblResult.Visible = false;
            lblPreResult.Visible = false;
        }

        /// <summary>
        /// Classifies a 20x20 letter vector by running a tournament of saved
        /// pairwise networks: the current winner is compared against each next
        /// letter, and the file "wwcc.ndn" holds the network trained to tell
        /// letters[ww] and letters[cc] apart.
        /// </summary>
        /// <param name="letterVector">400-element compressed image vector.</param>
        /// <returns>The recognized letter, or "" if a network failed to load.</returns>
        private string RecognizeLetter(double[] letterVector)
        {
            int winner = 0;
            int current = 1;

            lstSimilarityTests.Items.Clear();
            picCompressed.Invalidate();
            lblResult.Text = "";
            while (current < Alphabet.LetterCount)
            {
                try
                {
                    string path = Path.Combine(Path.Combine(Application.StartupPath, "Networks"),
                        winner.ToString("00") + current.ToString("00") + ".ndn");
                    using (Stream stream = File.Open(path, FileMode.Open))
                    {
                        // NOTE(review): BinaryFormatter is insecure for untrusted input;
                        // kept only for compatibility with the existing .ndn files.
                        IFormatter formatter = new BinaryFormatter();
                        INetwork network = (INetwork)formatter.Deserialize(stream);

                        double[] output = network.Run(letterVector);
                        string result = letters[winner] + " vs " + letters[current] + " = ";
                        if (output[1] > output[0])
                        {
                            winner = current;
                        }
                        result += letters[winner];
                        lstSimilarityTests.Items.Add(result);
                        // Keep the newest entry visible; clamp at 0 so TopIndex is
                        // never negative while the list is shorter than its viewport.
                        int visibleItems = lstSimilarityTests.Height / lstSimilarityTests.ItemHeight;
                        lstSimilarityTests.TopIndex = Math.Max(0, lstSimilarityTests.Items.Count - visibleItems);
                    }
                    current++;
                }
                catch (Exception)
                {
                    MessageBox.Show("Failed to load saved neural networks", "Critical Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                    return "";
                }
            }
            return letters[winner];
        }

        /// <summary>
        /// Splits the drawn word into individual letters, recognizes each one,
        /// and shows the assembled word. Each clipped letter is also dumped to
        /// tmp_&lt;i&gt;.bmp for debugging.
        /// </summary>
        private void Recognize(object sender, EventArgs e)
        {
            lblResult.Visible = false;
            lblPreResult.Visible = false;

            Rectangle rect = new Rectangle(0, 0, picRecognition.Width, picRecognition.Height);

            DivisionBuilder strtg = new DivisionBuilder(rect, picRecognition.Letter);
            StringImageLayout imLayout = new StringImageLayout(picRecognition.Letter, strtg);

            /*FixedStrategy strtg = new FixedStrategy(rect, 2);
            StringImageLayout imLayout = new StringImageLayout(picRecognition.Letter, strtg);*/

            string word = "";
            int i = 0;
            foreach (Letter letter in imLayout.GetClippedLetters())
            {
                // Dispose the bitmap: the original leaked one GDI handle per letter.
                using (Bitmap map = letter.MakeBitmap())
                {
                    map.Save("tmp_" + i + ".bmp");
                }
                i++;
                word += RecognizeLetter(letter.GetEquivalentVector(20, 20));
            }

            lblResult.Text = word;
            lblResult.Visible = true;
            lblPreResult.Text = "The word is recognized as ";
            lblPreResult.Visible = true;
        }

        /// <summary>
        /// Trains one pairwise network for every combination of supported word
        /// lengths, reflecting progress in the progress bar.
        /// </summary>
        private void TrainWordLengthNetwork()
        {
            //for progress reflection
            int currentCombination = 0;
            // C(maxWordLength, 2) combinations; Math.Max guards the division
            // below against zero when fewer than two lengths are supported.
            int totalCombinations = Math.Max(1, maxWordLength * (maxWordLength - 1) / 2);

            for (int i = 0; i < maxWordLength; i++)
            {
                for (int j = i + 1; j < maxWordLength; j++)
                {
                    progressTraining.Value = 100 * currentCombination / totalCombinations;
                    CreateNetwork(i, j);
                    currentCombination++;
                }
            }
            progressTraining.Value = 0;
            btnTrain.Enabled = true;
        }

        /// <summary>
        /// Trains and saves a network that decides whether an input word has
        /// length i or length j.
        /// </summary>
        /// <param name="i">Index of the first word-length pattern set.</param>
        /// <param name="j">Index of the second word-length pattern set.</param>
        private void CreateNetwork(int i, int j)
        {
            BackpropagationNetwork network = InitializeNetwork();
            Alphabet i_Letters = Alphabet.GetLetter(i);
            Alphabet j_Letters = Alphabet.GetLetter(j);
            TrainingSet trainingSet = CreateTrainingSet(i_Letters, j_Letters);

            // Keep the UI responsive during the long training run.
            Application.DoEvents();

            TrainNetwork(trainingSet, network);

            string path = CountNetworkPath(i_Letters.LetterIndex, j_Letters.LetterIndex);
            try
            {
                // FileMode.Create does not create missing directories; ensure the
                // CountNetworks folder exists before the first save.
                Directory.CreateDirectory(Path.GetDirectoryName(path));
                using (Stream stream = File.Open(path, FileMode.Create))
                {
                    IFormatter formatter = new BinaryFormatter();
                    formatter.Serialize(stream, network);
                }
            }
            catch (IOException)
            {
                MessageBox.Show("Failed to save trained neural networks",
                "Critical Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                return;
            }
        }

        /// <summary>
        /// Builds an untrained three-layer backpropagation network sized from
        /// the shared configuration: linear input, sigmoid hidden and output.
        /// </summary>
        private BackpropagationNetwork InitializeNetwork()
        {
            ActivationLayer input = new LinearLayer(config.getInputLayerSize());
            ActivationLayer hidden = new SigmoidLayer(config.getInnerLayerSize());
            ActivationLayer output = new SigmoidLayer(config.getOutputLayerSize());

            // Connectors wire themselves into the layers they join, so the
            // instances do not need to be kept.
            new BackpropagationConnector(input, hidden);
            new BackpropagationConnector(hidden, output);

            return new BackpropagationNetwork(input, output);
        }

        /// <summary>
        /// Builds a two-class training set: every pattern of
        /// <paramref name="firstWordLength"/> is labelled (1, 0) and every
        /// pattern of <paramref name="secondWordLength"/> is labelled (0, 1).
        /// </summary>
        /// <param name="firstWordLength">Patterns for the first class.</param>
        /// <param name="secondWordLength">Patterns for the second class.</param>
        /// <returns>The populated training set.</returns>
        private TrainingSet CreateTrainingSet(Alphabet firstWordLength, Alphabet secondWordLength)
        {
            TrainingSet samples = new TrainingSet(
                config.getInputLayerSize(),
                config.getOutputLayerSize());

            foreach (Letter pattern in firstWordLength.Instances)
            {
                samples.Add(new TrainingSample(pattern.GetEquivalentVector(20, 20), new double[] { 1d, 0d }));
            }

            foreach (Letter pattern in secondWordLength.Instances)
            {
                samples.Add(new TrainingSample(pattern.GetEquivalentVector(20, 20), new double[] { 0d, 1d }));
            }

            return samples;
        }

        /// <summary>
        /// Trains the network with backpropagation, doubling the cycle count and
        /// re-initializing the network until every sample is classified with the
        /// configured margin or the cycle budget from txtCycles is exhausted.
        /// </summary>
        /// <param name="trainingSet">Samples with two-element target vectors.</param>
        /// <param name="network">Freshly built network to train in place.</param>
        private void TrainNetwork(TrainingSet trainingSet, BackpropagationNetwork network)
        {
            int cycles;
            if (!int.TryParse(txtCycles.Text, out cycles)) 
            { 
                cycles = config.getTotalCyclesCount(); //200 for incorrect input
            }
            // Echo the effective value back to the UI.
            txtCycles.Text = cycles.ToString();

            bool correct = false;

            int currentCycles = config.getStartCyclesCount();
            // Count is captured once: samples appended below are trained on but
            // never themselves re-checked inside this loop.
            int count = trainingSet.TrainingSampleCount;

            // Non-short-circuit '&' is equivalent to '&&' here (both operands are
            // side-effect free).
            while (correct == false & currentCycles <= cycles)
            {
                network.Initialize();
                network.Learn(trainingSet, currentCycles);
                correct = true;
                for (int sampleIndex = 0; sampleIndex < count; sampleIndex++)
                {
                    double[] op = network.Run(trainingSet[sampleIndex].InputVector);
                    double diff = op[0] - op[1];
                    // NOTE(review): the margin test is asymmetric — for class-2
                    // samples this compares diff against +threshold rather than
                    // -threshold, so a class-2 sample is accepted even when op[0]
                    // slightly exceeds op[1]. Possibly intentional; confirm.
                    if (((trainingSet[sampleIndex].OutputVector[0] > trainingSet[sampleIndex].OutputVector[1])
                          && diff < config.getAcceptableDifference())
                     || ((trainingSet[sampleIndex].OutputVector[0] < trainingSet[sampleIndex].OutputVector[1])
                          && diff > config.getAcceptableDifference()))
                    {
                        correct = false;
                        // Re-add misclassified samples so the next Learn() pass
                        // weights them more heavily. Duplicates accumulate across
                        // iterations of the outer loop.
                        trainingSet.Add(trainingSet[sampleIndex]);
                    }
                }
                currentCycles *= 2;
            }
        }

        /// <summary>
        /// Runs the saved word-length networks on the input image as a pairwise
        /// tournament and shows the winning length (winner index + 1).
        /// </summary>
        private void CalcWordLength()
        {
            //compression and scan the image
            double[] input = picRecognition.Letter.GetEquivalentVector(20, 20);

            int winner = 0;
            int current = 1;

            while (current < maxWordLength)
            {
                try
                {
                    using (Stream stream = 
                        File.Open(CountNetworkPath(winner, current), FileMode.Open))
                    {
                        // NOTE(review): BinaryFormatter is insecure for untrusted
                        // input; kept for compatibility with the saved .ndn files.
                        IFormatter formatter = new BinaryFormatter();
                        INetwork network = (INetwork)formatter.Deserialize(stream);

                        double[] output = network.Run(input);
                        if (output[1] > output[0])
                        {
                            winner = current;
                        }
                    }
                    current++;
                }
                catch (IOException)
                {
                    MessageBox.Show("Failed to load saved neural networks",
                    "Critical Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                    // Abort: the original fell through without advancing 'current',
                    // retrying the same missing file in an endless dialog loop.
                    return;
                }
            }
            lblResult.Text = (winner + 1).ToString();
            lblResult.Visible = true;
            lblPreResult.Text = "The number of characters is ";
            lblPreResult.Visible = true;
        }

        /// <summary>
        /// Builds the save/load path for the word-length network comparing the
        /// two given lengths, e.g. "&lt;startup&gt;\CountNetworks\01.ndn".
        /// </summary>
        /// <param name="firstLength">Index of the first word-length class.</param>
        /// <param name="secondLength">Index of the second word-length class.</param>
        /// <returns>Full path of the .ndn network file.</returns>
        private String CountNetworkPath(int firstLength, int secondLength)
        {
            // Path.Combine instead of manual separator concatenation.
            return Path.Combine(Path.Combine(Application.StartupPath, "CountNetworks"),
                firstLength.ToString() + secondLength.ToString() + ".ndn");
        }

        // Form load: populate the combo box with the supported word lengths and
        // select the first entry, which fires LoadLetter and initializes
        // currentLetter.
        private void LoadForm(object sender, EventArgs e)
        {
            cboAplhabet.Items.AddRange(counts);
            cboAplhabet.SelectedIndex = 0;
        }

        /// <summary>
        /// Paints the 20x20 compressed form of the recognition drawing as a
        /// grayscale thumbnail, scaled up to 40x40 pixels.
        /// </summary>
        private void DrawCompressed(object sender, PaintEventArgs e)
        {
            double[] bitArray = picRecognition.Letter.GetEquivalentVector(20, 20);
            using (Bitmap map = new Bitmap(20, 20))
            {
                // The vector is row-major: index k maps to row k / 20, column k % 20.
                for (int k = 0; k < bitArray.Length; k++)
                {
                    int darkness = (int)((1 - bitArray[k]) * 255);
                    map.SetPixel(k % 20, k / 20, Color.FromArgb(darkness, darkness, darkness));
                }
                e.Graphics.DrawImage(map, 1, 1, 40, 40);
            }
        }

        // Button handler: estimate how many characters the drawn word contains.
        private void WordLengthButtonClick(object sender, EventArgs e)
        {
            CalcWordLength();
        }

        // Button handler: disable the button while the (re-entrant, DoEvents-based)
        // training runs; TrainWordLengthNetwork re-enables it when done.
        private void TrainButtonClick(object sender, EventArgs e)
        {
            btnTrain.Enabled = false;
            TrainWordLengthNetwork();
        }
    }
}