using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using ZedGraph;

namespace WindowsApplication1
{
    public partial class KNNForm : Form
    {
        private Element[] elements;

        /// <summary>
        /// Builds the form around a training set: hides the test-result tab
        /// pages and trains a 1-nearest-neighbour classifier immediately.
        /// </summary>
        public KNNForm(Element[] elements)
        {
            InitializeComponent();
            this.elements = elements;

            // Detach the result pages until test data is loaded. Remove in
            // descending index order so each removal does not shift the
            // indices of the pages still to be removed.
            for (int page = 3; page >= 1; page--)
                tabControl1.TabPages.RemoveAt(page);

            // Train once, with k = 1.
            classifier = train(elements, 1);

            InitData();
        }

        /// <summary>
        /// Populates the training-data list view and, when the data has
        /// exactly two features, draws the scatter graph and re-attaches
        /// its tab page.
        /// </summary>
        private void InitData()
        {
            InitListView(elements, listView1);

            // A 2-D scatter plot only makes sense for exactly two features.
            bool plottable = elements[0].FeatureValues.Length == 2;
            if (plottable)
            {
                InitGraph(zedGraphControl1, elements);
                tabControl1.TabPages.Add(tabPage2);
            }
        }

        /// <summary>
        /// Rebuilds <paramref name="lv"/> with one row per element: a running
        /// row number ("Lp"), one column per feature value, and the class.
        /// </summary>
        private void InitListView(Element[] elements, ListView lv)
        {
            lv.Items.Clear();
            lv.Columns.Clear();

            int featureCount = elements[0].FeatureValues.Length;

            // Header row: "Lp", "Feature 1" .. "Feature n", "Class".
            ColumnHeader lpHeader = new ColumnHeader();
            lpHeader.Text = "Lp";
            lv.Columns.Add(lpHeader);
            for (int f = 0; f < featureCount; f++)
            {
                ColumnHeader featureHeader = new ColumnHeader();
                featureHeader.Text = "Feature " + ((f + 1).ToString());
                lv.Columns.Add(featureHeader);
            }
            ColumnHeader classHeader = new ColumnHeader();
            classHeader.Text = "Class";
            lv.Columns.Add(classHeader);

            int rowNumber = 1;
            foreach (Element el in elements)
            {
                string[] cells = new string[featureCount + 2];
                cells[0] = rowNumber.ToString();
                rowNumber++;

                // Feature values occupy cells 1..n; the class goes right
                // after this element's own feature count.
                int f = 0;
                for (f = 0; f < el.FeatureValues.Length; f++)
                    cells[f + 1] = el.FeatureValues[f].ToString();
                cells[f + 1] = el.Class.ToString();

                lv.Items.Add(new ListViewItem(cells));
            }
        }

        /// <summary>
        /// Draws a 2-D scatter plot of the elements on the given control:
        /// feature 1 on X, feature 2 on Y, and the class label encoded as
        /// the symbol colour via a Z-value gradient.
        /// </summary>
        private void InitGraph(ZedGraph.ZedGraphControl ctrl, Element[] els)
        {
            GraphPane pane = ctrl.GraphPane;

            // Set the titles and axis labels
            pane.Title.Text = "Feature to class graph";
            pane.XAxis.Title.Text = "Feature 1";
            pane.YAxis.Title.Text = "Feature 2";
            
            // One point per element: (feature1, feature2) with the class
            // label stored as the point's Z value; track the largest label
            // for the colour-gradient range below.
            PointPairList list = new PointPairList();
            int maxClass = els[0].Class;
            for (int i = 0; i < els.Length; i++)
            {
                list.Add(els[i].FeatureValues[0], els[i].FeatureValues[1], (double)els[i].Class);
                if (els[i].Class > maxClass)
                    maxClass = els[i].Class;
            }

            pane.CurveList.Clear();
            LineItem myCurve = pane.AddCurve("Elements", list, Color.Transparent, SymbolType.Diamond);
            
            // Colour each symbol by its Z value (the class label), mapping
            // the range [0, maxClass] onto this colour gradient.
            Color[] colors = { Color.Red, Color.Yellow, Color.Blue, Color.Black, Color.Orange, Color.Green };
            myCurve.Symbol.Fill = new Fill(colors);
            myCurve.Symbol.Fill.Type = FillType.GradientByZ;
            myCurve.Symbol.Fill.RangeMin = 0;
            myCurve.Symbol.Fill.RangeMax = maxClass;


            // Fill the axis background with a gradient
            pane.Chart.Fill = new Fill(Color.White, Color.LightGray, 45.0f);

            // Recalculate the axis scales for the new data, then force a
            // repaint so the graph is redrawn.
            ctrl.AxisChange();
            ctrl.Invalidate();
        }

        /// <summary>
        /// Builds a KNN classifier from the training set: stores the set,
        /// the neighbour count <paramref name="k"/>, and a per-feature
        /// normalization factor 1 / (max - min).
        /// </summary>
        /// <param name="elements">Training elements; must be non-empty.</param>
        /// <param name="k">Number of nearest neighbours to consult.</param>
        /// <returns>The trained classifier.</returns>
        private KNNClassifier train(Element[] elements, int k)
        {
            int len = elements[0].FeatureValues.Length;

            // Seed min and max with the first element's feature vector.
            double[] arrMin = new double[len];
            Array.Copy(elements[0].FeatureValues, arrMin, len);
            double[] arrMax = new double[len];
            Array.Copy(arrMin, arrMax, len);

            // Single pass over all elements to find each feature's range.
            // (Previously the elements were first grouped by class, which
            // had no effect on the resulting min/max.)
            for (int i = 1; i < elements.Length; i++)
            {
                double[] featureValues = elements[i].FeatureValues;
                for (int n = 0; n < featureValues.Length; n++)
                {
                    if (featureValues[n] > arrMax[n])
                        arrMax[n] = featureValues[n];
                    if (featureValues[n] < arrMin[n])
                        arrMin[n] = featureValues[n];
                }
            }

            // BUG FIX: a constant feature (max == min) used to divide by
            // zero and store +Infinity as its scale factor. Use 1.0 so a
            // constant feature passes through unscaled.
            double[] arrNorm = new double[len];
            for (int n = 0; n < len; n++)
            {
                double range = arrMax[n] - arrMin[n];
                arrNorm[n] = (range == 0.0) ? 1.0 : 1.0 / range;
            }

            // NOTE(review): normalizedValues is computed here but testKNN()
            // measures distances on the raw feature values — confirm whether
            // normalization was meant to be applied during classification.
            KNNClassifier clss = new KNNClassifier();
            clss.trainSet = elements;
            clss.normalizedValues = arrNorm;
            clss.k = k;

            return clss;
        }

        /// <summary>
        /// Groups the elements by their class label.
        /// </summary>
        private Dictionary<int, List<Element>> getClasses2Elements(Element[] els)
        {
            Dictionary<int, List<Element>> byClass = new Dictionary<int, List<Element>>();
            foreach (Element el in els)
            {
                // Single lookup: fetch the bucket, creating it on first use.
                List<Element> bucket;
                if (!byClass.TryGetValue(el.Class, out bucket))
                {
                    bucket = new List<Element>();
                    byClass[el.Class] = bucket;
                }
                bucket.Add(el);
            }
            return byClass;
        }

        /// <summary>
        /// Classifies a feature vector by majority vote among its k nearest
        /// training elements (squared Euclidean distance on raw, i.e.
        /// unnormalized, feature values).
        /// </summary>
        /// <param name="elements">Training elements that cast the votes.</param>
        /// <param name="features">Feature vector to classify.</param>
        /// <param name="k">Number of nearest neighbours to consult.</param>
        /// <returns>The winning class label.</returns>
        private int testKNN(Element[] elements, double[] features, int k)
        {
            // Distance from the query vector to every training element.
            LinkedList<Distance> trainVectors = new LinkedList<Distance>();
            for (int n = 0; n < elements.Length; n++)
            {
                Distance d = new Distance();
                d.dist = distance(features, elements[n].FeatureValues);
                d.clss = elements[n].Class;
                trainVectors.AddFirst(d);
            }
            Distance[] trainArray = sort(trainVectors);

            // BUG FIX: a k larger than the training-set size previously
            // threw IndexOutOfRangeException; clamp it to the array length.
            int votes = Math.Min(k, trainArray.Length);

            // Tally class labels among the nearest 'votes' neighbours
            // (TryGetValue avoids the ContainsKey + indexer double lookup).
            Dictionary<int, int> class2Count = new Dictionary<int, int>();
            for (int i = 0; i < votes; i++)
            {
                int count;
                class2Count.TryGetValue(trainArray[i].clss, out count);
                class2Count[trainArray[i].clss] = count + 1;
            }

            // Majority vote; a tie resolves to whichever class the
            // dictionary enumerates first (same as the original behaviour).
            int max = -1;
            int maxclss = 0;
            foreach (int clss in class2Count.Keys)
            {
                if (class2Count[clss] > max)
                {
                    max = class2Count[clss];
                    maxclss = clss;
                }
            }
            return maxclss;
        }

        /// <summary>
        /// Copies the linked list into an array and sorts it ascending by
        /// distance (the ordering defined by <see cref="Distance.CompareTo"/>).
        /// </summary>
        public Distance[] sort(LinkedList<Distance> list)
        {
            Distance[] sorted = new Distance[list.Count];
            list.CopyTo(sorted, 0);
            Array.Sort(sorted);
            return sorted;
        }

        /// <summary>
        /// Returns the squared Euclidean distance between two equal-length
        /// vectors. The square root is deliberately omitted: only the
        /// relative ordering of distances matters for nearest-neighbour
        /// ranking, and squaring preserves that ordering.
        /// </summary>
        private double distance(double[] s1, double[] s2)
        {
            double total = 0.0;
            for (int i = 0; i < s1.Length; i++)
            {
                double delta = s1[i] - s2[i];
                total += delta * delta;
            }
            return total;
        }

        /// <summary>
        /// Pairs a computed distance with the class label of the training
        /// element it was measured against; orders ascending by distance.
        /// </summary>
        public class Distance : IComparable<Distance>
        {
            public double dist;   // squared distance to the query vector
            public int clss;      // class label of the training element

            /// <summary>Ascending order by <see cref="dist"/>.</summary>
            public int CompareTo(Distance d)
            {
                // Kept as comparison operators (rather than dist.CompareTo)
                // so NaN handling matches the original: any comparison
                // involving NaN reports equality.
                return this.dist < d.dist ? -1 : (this.dist > d.dist ? 1 : 0);
            }
        }
        // Classifier trained once in the constructor (k = 1) and reused by
        // the test-button handler.
        private KNNClassifier classifier;

        // Plain data holder for the trained model.
        class KNNClassifier
        {
            public Element[] trainSet;        // training elements
            public double[] normalizedValues; // per-feature 1/(max-min) scale factors
            public int k;                     // number of neighbours to vote
        }

        /// <summary>
        /// Intentionally a no-op: on-demand retraining is disabled; the
        /// classifier is trained once in the constructor with k = 1.
        /// </summary>
        private void button1_Click(object sender, EventArgs e)
        {
        }

        /// <summary>
        /// Loads a test-data file, classifies every element with the trained
        /// KNN classifier, and shows the results: a list view plus, for 2-D
        /// feature data, a scatter graph on additional tab pages.
        /// </summary>
        private void button4_Click(object sender, EventArgs e)
        {
            OpenFileDialog d = new OpenFileDialog();
            d.Filter = Editor.STATFilter;

            if (d.ShowDialog(this) != DialogResult.OK)
                return;
            
            Element[] testElements = Editor.readkNNElements(d.FileName);

            // BUG FIX: an empty (or unreadable) file previously crashed on
            // the unguarded testElements[0] access below.
            if (testElements == null || testElements.Length == 0)
            {
                MessageBox.Show("The selected file contains no test data.", "Incorrect data.",
                    MessageBoxButtons.OK, MessageBoxIcon.Warning);
                return;
            }

            if (testElements[0].FeatureValues.Length != elements[0].FeatureValues.Length)
            {
                MessageBox.Show("Learning data and test data need to have the same features count.", "Incorrect data.",
                    MessageBoxButtons.OK, MessageBoxIcon.Warning);
                return;
            }

            // Classify each test element with the k chosen at training time.
            for (int i = 0; i < testElements.Length; i++)
                testElements[i].Class = testKNN(elements, testElements[i].FeatureValues, classifier.k);

            InitListView(testElements, listView2);

            bool plottable = testElements[0].FeatureValues.Length == 2;
            if (plottable)
            {
                InitGraph(zedGraphControl2, testElements);
            }

            // Re-attach the result tab pages removed in the constructor;
            // the count checks guard against adding them twice.
            if (tabControl1.TabPages.Count < 3)
                tabControl1.TabPages.Add(tabPage3);
            if (plottable && tabControl1.TabPages.Count < 4)
                tabControl1.TabPages.Add(tabPage4);
        }
    }
}