﻿

using System;
using System.Drawing;
using System.IO;
using System.Runtime.Serialization;
using System.Runtime.Serialization.Formatters.Binary;
using System.Windows.Forms;
using NeuronDotNet.Controls;
using NeuronDotNet.Core;
using NeuronDotNet.Core.Backpropagation;
using uitocr.Math;

using uitocr.Statistics.Kernels;
using uitocr.MachineLearning.VectorMachines.Learning;
using uitocr.MachineLearning.VectorMachines;
using System.Diagnostics;

namespace NeuronDotNet.Samples.CharacterRecognition
{
    public partial class newstyle : Form
    {
        // Letter currently being edited in the training tab; assigned by
        // LoadLetter whenever the alphabet combo box selection changes.
        Alphabet currentLetter = null;

        // Display name for each class index, in the order the classifiers are
        // trained: a Vietnamese-style alphabet in two font groups ("tahoma"
        // starts at the marked comment) plus a block of signs and uppercase
        // Times New Roman letters.
        // NOTE(review): many glyph names repeat across the sections, so the
        // index — not the name — is the canonical class identity.
        private static readonly string[] letters = 
        {
            "A", "a", "B", "b", "c", "C", "d","Đ", "D", 
            "đ", "e", "E", "F", "f", "G", "g","h", "H",
            "I", "J", "K", "k", "L", "l" ,"m","M", "N",
            "n", "o", "P", "Q", "q", "R" ,"r","S", "T",
            "t", "u", "Ư", "U", "ư", "V" ,"W","X", "Y",
            "y", "z", 
            // tahoma
            "A" ,"a" ,"B" ,"b", "c", "d", "D", "Đ", "đ", "e",
            "E" ,"F" ,"f" ,"G", "g", "h", "H", "I", "J", "K",
            "k" ,"L" ,"m" ,"M" ,"N", "n", "o", "p", "P", "Q",
            "q" ,"R" ,"r" ,"S" ,"T", "t", "u", "ư", "U", "Ư",
            "v" ,"W" ,"X" ,"Y" ,"y", "z", "l", "p",
            //times new roman sign
            "`","/","~","?","^","_","A", "B", "C", "D", "E",
            "F", "G", "H", "I",
            "J", "K", "L", "M", "N", "O", "P", "Q", "R",
            "S", "T", "U", "V", "W", "X", "Y", "Z"


        };

        // Trained one-vs-one multiclass SVM; built by svm_train() and
        // serialized to svm.bin.
        MulticlassSupportVectorMachine ksvm = null;

        /// <summary>
        /// Initializes the form, pre-selects the first combo box entry and
        /// defaults the engine choice to neural networks (checkBox1).
        /// </summary>
        public newstyle()
        {
            InitializeComponent();
            // BUG FIX: the original wrote `comboBox1.SelectedItem = 0;`, which boxes
            // the integer 0 and — unless the Items collection happens to contain that
            // exact boxed value — silently selects nothing. Selecting the first entry
            // by index is what was intended; the Count guard keeps the original
            // "no-op on an empty list" behavior instead of throwing.
            if (comboBox1.Items.Count > 0)
                comboBox1.SelectedIndex = 0;
            checkBox1.Checked = true;
        }

        // Alphabet combo box selection changed: load the letter at the selected
        // index and refresh the navigation labels/buttons.
        private void LoadLetter(object sender, EventArgs e)
        {
            currentLetter = Alphabet.GetLetter(cboAplhabet.SelectedIndex);
            SetLabels();
        }

        // Stores the glyph currently drawn in the training picture box as a new
        // instance of the selected letter, then refreshes the UI.
        private void AddInstance(object sender, EventArgs e)
        {
            currentLetter.AddInstance(picTraining.Letter);
            SetLabels();
        }

        // Deletes the instance currently shown for the selected letter, then
        // refreshes the UI.
        private void RemoveInstance(object sender, EventArgs e)
        {
            currentLetter.RemoveCurrentInstance();
            SetLabels();
        }

        // Asks for confirmation, then discards every stored instance of the
        // currently selected letter.
        private void RemoveAll(object sender, EventArgs e)
        {
            DialogResult choice = MessageBox.Show(
                "Are you sure?",
                "Confirm Removal",
                MessageBoxButtons.YesNoCancel,
                MessageBoxIcon.Question);

            // Anything other than an explicit Yes (No or Cancel) keeps the data.
            if (choice != DialogResult.Yes)
            {
                return;
            }

            currentLetter.RemoveAll();
            SetLabels();
        }

        // Resets the training canvas to a blank glyph so the user can draw a
        // brand-new sample from scratch.
        private void ClearImage(object sender, EventArgs e)
        {
            // Fresh, empty drawing surface.
            picTraining.Letter = new Letter();
            picTraining.Invalidate();

            // Nothing is drawn yet, so add/remove/clear make no sense right now.
            btnAdd.Enabled = false;
            btnRemove.Enabled = false;
            btnClearImage.Enabled = false;

            lblOf.Text = "New";
        }

        // Advances to the next stored instance of the current letter and
        // refreshes the UI.
        private void MoveNext(object sender, EventArgs e)
        {
            currentLetter.MoveNext();
            SetLabels();
        }

        // Steps back to the previous stored instance of the current letter and
        // refreshes the UI.
        private void MovePrevious(object sender, EventArgs e)
        {
            currentLetter.MovePrevious();
            SetLabels();
        }

        // Synchronizes the "i of n" label, the navigation/edit buttons and the
        // preview picture with the current letter's instance collection.
        private void SetLabels()
        {
            bool hasInstances = this.currentLetter.InstancesCount > 0;

            // Every edit/navigation button is enabled exactly when at least one
            // instance exists.
            btnPrev.Enabled = hasInstances;
            btnNext.Enabled = hasInstances;
            btnClearImage.Enabled = hasInstances;
            btnRemoveAll.Enabled = hasInstances;
            btnRemove.Enabled = hasInstances;
            btnAdd.Enabled = hasInstances;

            if (hasInstances)
            {
                lblOf.Text = (currentLetter.InstanceIndex + 1) + " of " + currentLetter.InstancesCount.ToString();
                picTraining.Letter = currentLetter.CurrentInstance;
            }
            else
            {
                lblOf.Text = "0 of 0";
                picTraining.Letter = new Letter();
            }

            picTraining.Invalidate();
        }

       

        // Mouse pressed on the training canvas: the user is drawing a new,
        // not-yet-saved glyph, so allow Add/Clear but not Remove.
        private void TrainingPicMousedown(object sender, MouseEventArgs e)
        {
            btnAdd.Enabled = true;
            btnRemove.Enabled = false;
            btnClearImage.Enabled = true;
            lblOf.Text = "New";
        }

      


        // Form Load: populate the alphabet combo box with every class name and
        // select the first entry (which in turn fires LoadLetter).
        private void LoadForm(object sender, EventArgs e)
        {
            cboAplhabet.Items.AddRange(letters);
            cboAplhabet.SelectedIndex = 0;
        }

        // Designer-wired click handler for the Classify button; intentionally
        // empty in this view — recognition is presumably driven by Recognize()
        // instead (TODO confirm the designer wiring).
        private void btnClassify_Click(object sender, EventArgs e)
        {

        }

        // Starts training with the engine selected via checkBox1
        // (checked = backpropagation networks, unchecked = multiclass SVM).
        private void btnSampleRunAnalysis_Click(object sender, EventArgs e)
        {
            train(checkBox1.Checked);
        }
        // Dispatches to the chosen learning back-end:
        // true -> backpropagation neural networks, false -> multiclass SVM.
        private void train(bool p)
        {
            if (!p)
            {
                svm_train();
                return;
            }

            ann_Train();
        }
        // ANN check box toggled: re-establish mutual exclusivity with the SVM
        // check box.
        private void checkBox1_CheckedChanged(object sender, EventArgs e)
        {
            applysetting();
        }

        // Keeps the two engine check boxes mutually exclusive: checkBox1 (ANN)
        // always mirrors the inverse of checkBox2 (SVM).
        // Re-entrancy is benign: assigning an unchanged Checked value does not
        // re-fire CheckedChanged, so the handlers cannot recurse indefinitely.
        private void applysetting()
        {
            checkBox1.Checked = !checkBox2.Checked;
            
        }

        // SVM check box toggled: re-establish mutual exclusivity with the ANN
        // check box.
        private void checkBox2_CheckedChanged(object sender, EventArgs e)
        {
            applysetting();
        }
        

        /// <summary>
        /// Trains one small backpropagation network per unordered pair of letters
        /// (a one-vs-one scheme) and serializes each trained network to
        /// "&lt;StartupPath&gt;\Networks\&lt;ii&gt;&lt;jj&gt;.ndn" for later use by Recognize().
        /// A results row (pair, mean squared error) is appended to dataGridView1.
        /// </summary>
        private void ann_Train()
        {
            int k = 0;                            // running row index for the results grid
            int cycles = (int)numCycles.Value;    // user-selected cap on training cycles
            int currentCombination = 0;           // kept for the (commented-out) progress bar
            int totalCombinations = Alphabet.LetterCount * (Alphabet.LetterCount - 1) / 2;

            for (int i = 0; i < Alphabet.LetterCount; i++)
            {
                for (int j = i + 1; j < Alphabet.LetterCount; j++)
                {
                    // 400 inputs = 20x20 downsampled glyph; 2 outputs = "is i" vs "is j".
                    ActivationLayer inputLayer = new LinearLayer(400);
                    ActivationLayer hiddenLayer = new SigmoidLayer(4);
                    ActivationLayer outputLayer = new SigmoidLayer(2);
                    new BackpropagationConnector(inputLayer, hiddenLayer);
                    new BackpropagationConnector(hiddenLayer, outputLayer);
                    BackpropagationNetwork network = new BackpropagationNetwork(inputLayer, outputLayer);

                    // Samples of letter i are labeled {1,0}, samples of letter j {0,1};
                    // Recognize() relies on this output convention.
                    TrainingSet trainingSet = new TrainingSet(400, 2);
                    Alphabet ithLetter = Alphabet.GetLetter(i);
                    Alphabet jthLetter = Alphabet.GetLetter(j);
                    foreach (Letter instance in ithLetter.Instances)
                    {
                        trainingSet.Add(new TrainingSample(instance.GetEquivalentVector(20, 20), new double[] { 1d, 0d }));
                    }
                    foreach (Letter instance in jthLetter.Instances)
                    {
                        trainingSet.Add(new TrainingSample(instance.GetEquivalentVector(20, 20), new double[] { 0d, 1d }));
                    }

                    //toolStripProgressBar1.Value = 100 * currentCombination / totalCombinations;

                    Application.DoEvents();

                    bool correct = false;
                    int currentCycles = 35;

                    // Snapshot the original sample count: mis-classified samples are
                    // re-added below, and the verification loop must only walk the
                    // original samples.
                    int count = trainingSet.TrainingSampleCount;

                    // BUG FIX: was `correct == false & currentCycles <= cycles` — the
                    // non-short-circuiting bitwise `&`. `&&` is the intended logical
                    // AND; the loop result is identical but idiomatic.
                    while (correct == false && currentCycles <= cycles)
                    {
                        network.Initialize();
                        network.Learn(trainingSet, currentCycles);
                        correct = true;
                        for (int sampleIndex = 0; sampleIndex < count; sampleIndex++)
                        {
                            double[] op = network.Run(trainingSet[sampleIndex].InputVector);
                            // A sample counts as wrong when the expected winner does not
                            // beat the loser by at least a 0.4 margin.
                            if (((trainingSet[sampleIndex].OutputVector[0] > trainingSet[sampleIndex].OutputVector[1]) && op[0] - op[1] < 0.4) || ((trainingSet[sampleIndex].OutputVector[0] < trainingSet[sampleIndex].OutputVector[1]) && op[1] - op[0] < 0.4))
                            {
                                correct = false;
                                // Duplicate hard samples so they weigh more heavily on
                                // the next (longer) training pass.
                                trainingSet.Add(trainingSet[sampleIndex]);
                            }
                        }
                        currentCycles *= 2;
                    }
                    dataGridView1.Rows.Add(k++, i + " & " + j, network.MeanSquaredError.ToString());

                    try
                    {
                        // Path.Combine instead of string concatenation; same file name
                        // scheme as before ("<ii><jj>.ndn" with i < j).
                        using (Stream stream = File.Open(Path.Combine(Path.Combine(Application.StartupPath, "Networks"), i.ToString("00") + j.ToString("00") + ".ndn"), FileMode.Create))
                        {
                            // SECURITY NOTE: BinaryFormatter is obsolete and unsafe for
                            // untrusted data; tolerable only because these files are
                            // locally produced artifacts. Consider migrating.
                            IFormatter formatter = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
                            formatter.Serialize(stream, network);
                        }
                    }
                    catch (Exception)
                    {
                        MessageBox.Show("Failed to save trained neural networks", "Critical Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                        return;
                    }
                    currentCombination++;
                }
            }
            //toolStripProgressBar1.Value = 0;
        }

        /// <summary>
        /// Trains a one-vs-one multiclass SVM on every stored letter instance
        /// (20x20 = 400 features per sample), lists a subset of the pairwise
        /// machines in dgvMachines, and serializes the machine to svm.bin.
        /// </summary>
        private void svm_train()
        {
            // Total number of training samples across all letters.
            int rows = 0;
            for (int i = 0; i < Alphabet.LetterCount; i++)
            {
                rows += Alphabet.GetLetter(i).InstancesCount;
            }

            double[][] input = new double[rows][];
            int[] output = new int[rows];

            // BUG FIX: the original filled `output` in a separate loop hard-coded to
            // 26 classes using `i * InstancesCount(i) + j` indexing, which only lines
            // up with the `input` fill order when every letter has exactly the same
            // number of instances (and left labels for classes >= 26 at 0). Filling
            // both arrays in one pass keeps each sample aligned with its label for
            // any per-letter count and covers all Alphabet.LetterCount classes.
            int inc = 0;
            for (int i = 0; i < Alphabet.LetterCount; i++)
            {
                Alphabet ithLetter = Alphabet.GetLetter(i);
                foreach (Letter instance in ithLetter.Instances)
                {
                    input[inc] = instance.GetEquivalentVector(20, 20);
                    output[inc] = i;    // class label = letter index
                    inc++;
                }
            }

            // Create the chosen Kernel with given parameters.
            IKernel kernel;
            if (rbGaussian.Checked)
                kernel = new Gaussian((double)numSigma.Value);
            else
                kernel = new Polynomial((int)numDegree.Value, (double)numConstant.Value);

            // Create the Multi-class Support Vector Machine using the selected Kernel.
            // BUG FIX: the class count was hard-coded to 26, but the `letters` table
            // defines Alphabet.LetterCount classes (far more than 26), so labels of
            // 26 and above fell outside the machine's class range.
            ksvm = new MulticlassSupportVectorMachine(400, kernel, Alphabet.LetterCount);

            // Create the learning algorithm using the machine and the training data.
            MulticlassSupportVectorLearning ml = new MulticlassSupportVectorLearning(ksvm, input, output);

            // Extract training parameters from the interface.
            double complexity = (double)numComplexity.Value;
            double epsilon = (double)numEpsilon.Value;
            double tolerance = (double)numTolerance.Value;

            // One SMO trainer per pairwise machine, all sharing the UI parameters.
            ml.Algorithm = (svm, classInputs, classOutputs, i, j) =>
            {
                var smo = new SequentialMinimalOptimization(svm, classInputs, classOutputs);
                smo.Complexity = complexity;
                smo.Epsilon = epsilon;
                smo.Tolerance = tolerance;
                return smo;
            };

            Application.DoEvents();
            Stopwatch sw = Stopwatch.StartNew();

            // Train the machines. It should take a while.
            // `error` was previously shown in a (now removed) status label.
            double error = ml.Run();

            sw.Stop();

            btnClassify.Enabled = true;

            // List the trained pairwise machines in the grid.
            // NOTE(review): the original only displayed machines for classes 0..9;
            // kept as-is to avoid flooding the grid, but this is a subset.
            dgvMachines.Rows.Clear();
            int k = 1, s = 0;
            for (int i = 0; i < 10; i++)
            {
                for (int j = 0; j < i; j++, k++)
                {
                    var machine = ksvm[i, j];

                    int c = dgvMachines.Rows.Add(k, i + "-vs-" + j, machine.SupportVectors.Length, machine.Threshold);
                    dgvMachines.Rows[c].Tag = machine;

                    s += machine.SupportVectors.Length;
                }
            }

            // Persist the trained machine for later sessions.
            // SECURITY NOTE: BinaryFormatter is obsolete/unsafe for untrusted input;
            // tolerable only because svm.bin is a locally produced artifact.
            using (FileStream file = new FileStream("svm.bin", FileMode.Create, FileAccess.ReadWrite))
            {
                BinaryFormatter bin = new BinaryFormatter();
                bin.Serialize(file, ksvm);
            }
        }
        // Shows the support vectors of the pairwise machine selected in the
        // machines grid, rendering each vector as a weight-scaled 20x20 thumbnail.
        private void dgvMachines_CurrentCellChanged(object sender, EventArgs e)
        {
            // Each row's Tag was set to its machine when the grid was populated.
            DataGridViewRow row = dgvMachines.CurrentRow;
            if (row == null) return;

            KernelSupportVectorMachine m = row.Tag as KernelSupportVectorMachine;
            if (m == null) return;

            // NOTE(review): Max()/Min() and Apply() appear to be extension methods
            // from the uitocr library (System.Linq is not imported here) — confirm.
            double max = m.Weights.Max();
            double min = m.Weights.Min();
            dgvVectors.Rows.Clear();
            for (int i = 0; i < m.SupportVectors.Length; i++)
            {
                var vector = m.SupportVectors[i];
                var weight = m.Weights[i];
                // Scale each component by the weight mapped into [-1, 1] so heavier
                // support vectors render with stronger contrast.
                double[] f = vector.Apply(x => x *
                    uitocr.Math.Tools.Scale(min, max, -1, 1, weight));
                dgvVectors.Rows.Add(Export(f), m.Weights[i]);
            }

        }

        // Renders a 400-element (20x20, row-major) feature vector as a grayscale
        // bitmap: magnitude 0 maps to white (255) and magnitude >= 1 to black (0).
        private Bitmap Export(double[] features)
        {
            Bitmap bitmap = new Bitmap(20, 20, System.Drawing.Imaging.PixelFormat.Format32bppRgb);

            for (int row = 0; row < 20; row++)
            {
                for (int col = 0; col < 20; col++)
                {
                    // Clamp |feature| * 255 into [0, 255], then invert so larger
                    // magnitudes appear darker.
                    double magnitude = Math.Abs(features[row * 20 + col]) * 255;
                    double clamped = Math.Min(255, Math.Max(0, magnitude));
                    int gray = (int)(255 - clamped);
                    bitmap.SetPixel(col, row, Color.FromArgb(gray, gray, gray));
                }
            }

            return bitmap;
        }

        /// <summary>
        /// Classifies the glyph drawn in picRecognition by running a
        /// single-elimination tournament over the pairwise networks saved by
        /// ann_Train(): the current winner is pitted against each remaining
        /// letter in turn, and the final winner's name is shown in lblResult.
        /// </summary>
        private void Recognize(object sender, EventArgs e)
        {
            lblResult.Visible = false;
            lblPreResult.Visible = false;

            // 400-element feature vector from the 20x20 downsampled drawing.
            double[] input = picRecognition.Letter.GetEquivalentVector(20, 20);

            int winner = 0;     // letter index that has won every duel so far
            int current = 1;    // index of the next challenger

            lblResult.Text = "";

            while (current < Alphabet.LetterCount)
            {
                try
                {
                    // Load the network trained for the (winner, current) pair.
                    // `winner < current` always holds (winner is only ever replaced
                    // by current), so the file name matches ann_Train()'s
                    // "<ii><jj>.ndn" scheme with i < j. Path.Combine replaces the
                    // original string concatenation.
                    string path = Path.Combine(Path.Combine(Application.StartupPath, "Networks"), winner.ToString("00") + current.ToString("00") + ".ndn");
                    using (Stream stream = File.Open(path, FileMode.Open))
                    {
                        // SECURITY NOTE: BinaryFormatter deserialization is unsafe
                        // for untrusted files; these are locally produced artifacts.
                        IFormatter formatter = new BinaryFormatter();
                        INetwork network = (INetwork)formatter.Deserialize(stream);

                        double[] output = network.Run(input);
                        // Training convention from ann_Train(): output[0] votes for
                        // the lower index (winner), output[1] for the higher (current).
                        if (output[1] > output[0])
                        {
                            winner = current;
                        }
                    }
                    current++;
                }
                catch (Exception)
                {
                    MessageBox.Show("Failed to load saved neural networks", "Critical Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                    return;
                }
            }
            lblResult.Text = letters[winner];
            lblResult.Visible = true;
            lblPreResult.Visible = true;
        }

        // Designer-wired click handler for the result label; intentionally empty.
        private void lblResult_Click(object sender, EventArgs e)
        {

        }

        // Jumps to the tab matching the selected engine:
        // checkBox2 (SVM) checked -> tab index 2, otherwise tab index 3.
        private void button2_Click(object sender, EventArgs e)
        {
            if (checkBox2.Checked)
            {
                tabControl1.SelectedIndex = 2;
            }
            else
            {
                tabControl1.SelectedIndex = 3;
            }
        }

    }
}