﻿using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using NeuralNetworkPro;
using NeuralNetworkPro.Activation_Functions;
using QrGenerator;

namespace Main
{
    public class Program
    {
        /// <summary>
        /// Converts <paramref name="data"/> to a fixed-width binary vector,
        /// most-significant bit first (index 0 = MSB, last index = LSB).
        /// </summary>
        /// <param name="data">Non-negative value to encode; negative values yield all zeros.</param>
        /// <param name="num_digit">Number of bits in the result.</param>
        /// <returns>Array of 0.0/1.0 values of length <paramref name="num_digit"/>.</returns>
        public static double[] DecimalToBinary(int data, int num_digit)
        {
            double[] result = new double[num_digit];
            int num = data;
            int i = num_digit;
            // Fill from the least-significant end. Stop at index 0 so values wider
            // than num_digit bits are truncated to the low bits instead of
            // throwing IndexOutOfRangeException (the original decremented past 0).
            while (num > 0 && i > 0)
            {
                result[--i] = (double)(num % 2);
                num = num / 2;
            }
            return result;
        }

        /// <summary>
        /// Converts a binary vector back to its decimal value. Each element is
        /// rounded first, so soft network outputs (e.g. 0.9) are accepted.
        /// BUGFIX: interprets index 0 as the most-significant bit, matching the
        /// layout produced by <see cref="DecimalToBinary"/> (the original treated
        /// index 0 as the LSB, so round-trips were wrong: {0,1,0,1} -> 10, not 5).
        /// </summary>
        /// <param name="data">Bit vector, MSB first.</param>
        /// <returns>The decoded value.</returns>
        static double BynaryToDecimal(double[] data)
        {
            double res = 0;
            foreach (var d in data)
            {
                // Horner-style accumulation: shift left and add the next bit.
                res = res * 2 + Math.Round(d);
            }
            return res;
        }

        /// <summary>
        /// Decomposes a number into its decimal digits, right-aligned and
        /// zero-padded on the left: DigitDecomposition(42, 3) -> {0, 4, 2}.
        /// BUGFIX: the original threw IndexOutOfRangeException when the number
        /// had more digits than <paramref name="num_digits"/> and FormatException
        /// for negative inputs; now the low digits are kept and the sign dropped.
        /// </summary>
        /// <param name="data">Value to decompose (absolute value is used).</param>
        /// <param name="num_digits">Length of the result array.</param>
        /// <returns>Array of digit values 0.0-9.0 of length <paramref name="num_digits"/>.</returns>
        public static double[] DigitDecomposition(int data, int num_digits)
        {
            // (long) cast avoids overflow for Math.Abs(int.MinValue).
            string digits = Math.Abs((long)data).ToString();
            // Keep only the least-significant digits when the number is too long.
            if (digits.Length > num_digits)
                digits = digits.Substring(digits.Length - num_digits);
            // new double[] is already zero-initialized, so no explicit padding loop.
            double[] res = new double[num_digits];
            int offset = num_digits - digits.Length;
            for (int k = 0; k < digits.Length; k++)
            {
                res[offset + k] = (double)(digits[k] - '0');
            }
            return res;
        }

        /// <summary>
        /// Pretty-prints a vector to the console as "{ v1, v2, ... }", rounding
        /// each element to the nearest integer.
        /// </summary>
        static void PP(double[] res)
        {
            var line = new StringBuilder("{ ");
            foreach (var value in res)
            {
                line.Append(Math.Round(value, 0)).Append(", ");
            }
            line.Append("}");
            Console.WriteLine(line.ToString());
        }
        
        // Trains a 3->32->7 network to map a number's three decimal digits
        // (one input per digit) to its 7-bit binary representation, then answers
        // console queries until a negative number is entered.
        static void ConversionTest()
        {
            var epochs = 100000;
            var data_length = 100;
            var rnd = new Random();
            var nums = (from i in Enumerable.Range(0, data_length) select rnd.Next(0, 100)).ToArray();//, DecimalToBinary(i, 4))/*rnd.Next(0, 16)*/).ToArray();
            // Item1 = inputs (per-digit decomposition), Item2 = targets (7-bit binary).
            var data = new Tuple<double[][], double[][]>(nums.Select(s => DigitDecomposition(s,3)).ToArray(),nums.Select(n => DecimalToBinary(n, 7)).ToArray());


            var network = new NeuralNetwork(new SigmoidFunction(1), 3, new int[] { 32, 7 });

            // 0.8 / 0.5 are presumably learning rate / momentum -- TODO confirm in BackPropagation.
            var teacher = new BackPropagation(network, 0.8, 0.5);
            var error = 0.0;
            for (int i = 0; i < epochs; i++)
            {
                error = teacher.RunEpoch(data.Item1, data.Item2);
                if (i % 25 == 0)
                    Console.WriteLine(error);
            }
            // Interactive evaluation loop; a negative number exits,
            // unparsable input just reprompts.
            while (true)
            {
                Console.WriteLine("enter a number");
                try
                {
                    int num = int.Parse(Console.ReadLine());
                    if (num < 0)
                        break;
                    PP(network.Compute(DigitDecomposition(num,3)));
                }
                catch (Exception)
                {
                    Console.WriteLine("Overflow pls...");
                }
            }
        }
        // Trains a 32->10->1 network to classify 32-bit numbers as even/odd.
        // The label is (n + 1) % 2, i.e. 1 when n is even, so the single output
        // neuron fires for even inputs. Prunes once, then answers console queries.
        static void OddEvenBinary()
        {
            var epochs = 100;
            var data_length = 15;
            var rnd = new Random();
            var nums = (from i in Enumerable.Range(0, data_length) select rnd.Next(0, int.MaxValue)).ToArray();//, DecimalToBinary(i, 4))/*rnd.Next(0, 16)*/).ToArray();
            var data = new Tuple<double[][], double[][]>(nums.Select(n => DecimalToBinary(n, 32)).ToArray(), nums.Select(n => new double[] { (n + 1) % 2 }).ToArray());


            var network = new NeuralNetwork(new SigmoidFunction(1), 32, new int[] { 10, 1 });

            // 0.9 / 0.5 are presumably learning rate / momentum -- TODO confirm in BackPropagation.
            var teacher = new BackPropagation(network, 0.9, 0.5);

            var error = 0.0;
            for (int i = 0; i < epochs; i++)
            {
                error = teacher.RunEpoch(data.Item1, data.Item2);
                Console.WriteLine(error);
                //if (i % 10 == 0)
                //{
                //    teacher.Save("net");
                //    teacher = null;
                //    teacher = BackPropagation.Load("net");
                //    network = teacher.Network;
                //}
            }

            //network.Save("net");
            //network = null;
            //network = NeuralNetwork.Load("net");

            // Single unconditional pruning pass over the training inputs.
            var prune = new Pruning(network, data.Item1);
            prune.Prune();

            // Interactive evaluation loop; a negative number exits.
            while (true)
            {
                Console.WriteLine("enter a number");
                try
                {
                    int num = int.Parse(Console.ReadLine());
                    if (num < 0)
                        break;
                    PP(network.Compute(DecimalToBinary(num, 32)));
                }
                catch (Exception)
                {
                    Console.WriteLine("Overflow pls...");
                }
            }
        }

        // Same even/odd task as OddEvenBinary but with a deeper 32->32->32->1
        // network, a save/load round-trip, and an iterative pruning loop that is
        // checked against a fresh 1000-sample validation set. The interactive
        // loop at the end accepts "p" to prune once more or a negative number to exit.
        static void MioOddEvenBinary()
        {
            var epochs = 10000;
            var data_length = 150;
            var rnd = new Random();
            var nums = (from i in Enumerable.Range(0, data_length) select rnd.Next(0, int.MaxValue)).ToArray();//, DecimalToBinary(i, 4))/*rnd.Next(0, 16)*/).ToArray();
            // Label (n + 1) % 2 == 1 when n is even.
            var data = new Tuple<double[][], double[][]>(nums.Select(n => DecimalToBinary(n, 32)).ToArray(), nums.Select(n => new double[] { (n + 1) % 2 }).ToArray());


            var network = new NeuralNetwork(new SigmoidFunction(1), 32, new int[] { 32, 32, 1 });

            // 0.9 / 0.5 are presumably learning rate / momentum -- TODO confirm in BackPropagation.
            var teacher = new BackPropagation(network, 0.9, 0.5);

            var error = double.MaxValue;
            // Stop early once the epoch error drops below 0.01.
            for (int i = 0; i < epochs && error > 0.01; i++)
            {
                error = teacher.RunEpoch(data.Item1, data.Item2);
                Console.WriteLine(error);
                //if (i % 10 == 0)
                //{
                //    teacher.Save("net");
                //    teacher = null;
                //    teacher = BackPropagation.Load("net");
                //    network = teacher.Network;
                //}
            }

            // Round-trip through disk to verify persistence of the trained network.
            network.Save("net");
            network = null;
            network = NeuralNetwork.Load("net");
            // NOTE(review): old_prune_error is never updated, so the pruning loop
            // below continues only while the pruned error stays at 0. Verify whether
            // `old_prune_error = prune_error;` was intended here, as in ClassifyQR.
            var old_prune_error = 0;
            var prune_error = 0;
            var prune = new Pruning(network, data.Item1);
            var prune_test = 1000;
            var prune_nums = (from i in Enumerable.Range(0, prune_test) select rnd.Next(0, int.MaxValue)).ToArray();//, DecimalToBinary(i, 4))/*rnd.Next(0, 16)*/).ToArray();
            var prune_data = new Tuple<double[][], double[][]>(prune_nums.Select(n => DecimalToBinary(n, 32)).ToArray(), prune_nums.Select(n => new double[] { (n + 1) % 2 }).ToArray());
            // Baseline misclassification count on the validation set before pruning.
            for (int i = 0; i < prune_test; i++)
            {
                var res = network.Compute(prune_data.Item1[i]);
                if ((int)Math.Round(res[0]) != prune_data.Item2[i][0])
                {
                    //Console.WriteLine("error given: " + prune_nums[i]);
                    prune_error++;
                }
            }
            do
            {
                Console.WriteLine(network);
                Console.WriteLine("continue pruning?");
                // Snapshot so the last (worse) pruning step can be rolled back below.
                network.Backup();
                //try
                //{
                if (prune.CanPrune())
                    prune.Prune();
                else
                    break;
                //}
                //catch (Exception e)
                //{
                //    Console.WriteLine(e.Message);
                //    break;
                //}
                prune_error = 0;
                // Re-count misclassifications on the validation set after this prune step.
                for (int i = 0; i < prune_test; i++)
                {
                    var res = network.Compute(prune_data.Item1[i]);
                    if ((int)Math.Round(res[0]) != prune_data.Item2[i][0])
                    {
                        //Console.WriteLine("error given: " + prune_nums[i]);
                        prune_error++;
                    }
                }
                Console.WriteLine("pruned error = " + prune_error);
            } while (prune_error <= old_prune_error);

            // Roll back to the snapshot taken before the last pruning step.
            network = NeuralNetwork.Restore();

            while (true)
            {
                Console.WriteLine("enter a number");
                try
                {
                    var intent = Console.ReadLine();
                    if (intent == "p")
                    {
                        prune.Prune();
                    }
                    else
                    {
                        int num = int.Parse(intent);
                        if (num < 0)
                            break;
                        PP(network.Compute(DecimalToBinary(num, 32)));
                    }
                }
                catch (Exception)
                {
                    Console.WriteLine("Overflow pls...");
                }
            }
        }

        /// <summary>
        /// Returns 1 when the array reads the same forwards and backwards
        /// (palindrome), 0 otherwise. Empty and single-element arrays count as symmetric.
        /// </summary>
        private static double IsSimmetric(double[] array)
        {
            // Walk inward from both ends; any mirrored mismatch decides the result.
            for (int lo = 0, hi = array.Length - 1; lo < hi; lo++, hi--)
            {
                if (array[lo] != array[hi])
                    return 0;
            }
            return 1;
        }

        // Shared RNG: a fresh `new Random()` per call is time-seeded, so rapid
        // repeated calls (the 5000-iteration loop in SimmetryProblem) produced
        // runs of identical "random" sequences.
        private static readonly Random sequence_rnd = new Random();

        /// <summary>
        /// Returns a random 0/1 sequence of exactly <paramref name="precision"/> elements.
        /// When <paramref name="necessary_simmetric"/> is true the sequence is a
        /// palindrome built by mirroring a random half (odd lengths get a 0 in the middle).
        /// BUGFIX: the original always mirrored a fixed 16-element prefix (returning
        /// 32 elements regardless of precision) and capped non-symmetric draws at 255,
        /// leaving the high bits permanently zero.
        /// </summary>
        private static double[] GenerateSequence(int precision, bool necessary_simmetric = false)
        {
            if (necessary_simmetric)
            {
                int half = precision / 2;
                // Largest value representable in `half` bits (guard the shift for wide inputs).
                int half_max = half >= 31 ? int.MaxValue : 1 << half;
                var prefix = DecimalToBinary(sequence_rnd.Next(0, half_max), half);
                var middle = precision % 2 == 1 ? new double[] { 0.0 } : new double[0];
                return prefix.Concat(middle).Concat(prefix.Reverse()).ToArray();
            }
            else
            {
                // Draw from the full range representable in `precision` bits.
                int max = precision >= 31 ? int.MaxValue : 1 << precision;
                return DecimalToBinary(sequence_rnd.Next(0, max), precision);
            }
        }

        // Trains a 16->10->10->1 network to detect palindromic bit sequences.
        // Every 5th training sample is forced symmetric to balance the classes.
        // After training: save/load round-trip, iterative pruning validated on a
        // fresh 1000-sample set, then an interactive loop ("p" prunes again,
        // "y"/anything else generates a symmetric/random sequence to classify).
        public static void SimmetryProblem()
        {
            var input = 16;
            var epochs = 10000;  // NOTE(review): unused -- the loop below runs until error <= 0.01
            var data_length = 5000;
            var rnd = new Random();  // NOTE(review): unused; GenerateSequence has its own RNG
            var nums = new List<double[]>();
            var labels = new List<double[]>();  // NOTE(review): unused; labels are derived below via IsSimmetric

            // NOTE(review): verify that GenerateSequence(input, true) returns exactly
            // `input` elements -- the network below expects `input`-wide vectors.
            for (int i = 0; i < data_length; i++)
            {
                nums.Add(GenerateSequence(input, i % 5 == 0));
            }

            var data = new Tuple<double[][], double[][]>(nums.ToArray(), nums.Select(n => new double[] { IsSimmetric(n) }).ToArray());


            var network = new NeuralNetwork(new SigmoidFunction(1), input, new int[] { 10, 10, 1 });

            // 0.9 / 0.8 are presumably learning rate / momentum -- TODO confirm in BackPropagation.
            var teacher = new BackPropagation(network, 0.9, 0.8);

            var error = double.MaxValue;
            // No epoch cap: loops until the error target is reached.
            for (int i = 0; error > 0.01; i++)
            {
                error = teacher.RunEpoch(data.Item1, data.Item2);
                Console.WriteLine(error);
                //if (i % 10 == 0)
                //{
                //    teacher.Save("net");
                //    teacher = null;
                //    teacher = BackPropagation.Load("net");
                //    network = teacher.Network;
                //}
            }

            // Round-trip through disk to verify persistence of the trained network.
            network.Save("net");
            network = null;
            network = NeuralNetwork.Load("net");
            // NOTE(review): old_prune_error is never updated, so the pruning loop
            // below continues only while the pruned error stays at 0 -- verify intent.
            var old_prune_error = 0;
            var prune_error = 0;
            var prune = new Pruning(network, data.Item1);
            var prune_test = 1000;
            var prune_nums = new List<double[]>();
            for (int i = 0; i < prune_test; i++)
            {
                prune_nums.Add(GenerateSequence(input, i % 5 == 0));
            }
            var prune_data = new Tuple<double[][], double[][]>(prune_nums.ToArray(), prune_nums.Select(n => new double[] { IsSimmetric(n) }).ToArray());
            // Baseline misclassification count on the validation set before pruning.
            for (int i = 0; i < prune_test; i++)
            {
                var res = network.Compute(prune_data.Item1[i]);
                if ((int)Math.Round(res[0]) != prune_data.Item2[i][0])
                {
                    //Console.WriteLine("error given: " + prune_nums[i]);
                    prune_error++;
                }
            }
            do
            {
                Console.WriteLine(network);
                Console.WriteLine("continue pruning?");
                // Snapshot so the last (worse) pruning step can be rolled back below.
                network.Backup();
                //try
                //{
                if (prune.CanPrune())
                    prune.Prune();
                else
                    break;
                //}
                //catch (Exception e)
                //{
                //    Console.WriteLine(e.Message);
                //    break;
                //}
                prune_error = 0;
                // Re-count misclassifications on the validation set after this prune step.
                for (int i = 0; i < prune_test; i++)
                {
                    var res = network.Compute(prune_data.Item1[i]);
                    if ((int)Math.Round(res[0]) != prune_data.Item2[i][0])
                    {
                        //Console.WriteLine("error given: " + prune_nums[i]);
                        prune_error++;
                    }
                }
                Console.WriteLine("pruned error = " + prune_error);
            } while (prune_error <= old_prune_error);

            // Roll back to the snapshot taken before the last pruning step.
            network = NeuralNetwork.Restore();

            while (true)
            {
                Console.WriteLine("simmetric or not? (y/n)");
                try
                {
                    var intent = Console.ReadLine();
                    if (intent == "p")
                    {
                        prune.Prune();
                    }
                    else
                    {
                        // "y" -> generate a guaranteed-symmetric sequence; anything else -> random.
                        var num = GenerateSequence(input, intent == "y");
                        Console.WriteLine("sequence:");
                        PP(num);
                        PP(network.Compute(num));
                    }
                }
                catch (Exception)
                {
                    Console.WriteLine("Overflow pls...");
                }
            }
        }

        // Trains a 4->30x4->3 network on the Iris data set (4 numeric features,
        // one-hot species label) until the epoch error drops below `threshold`.
        // Expects Data/iris.data in the classic UCI CSV format.
        static void ClassifyIris()
        {
            double threshold = 5.0;
            var epochs = 1000;  // NOTE(review): unused -- the loop below is threshold-driven
            var rnd = new Random();
            var rows = File.ReadAllLines("Data/iris.data").Where(s => s.Length > 0).ToArray();
            var count = rows.Length;
            var features = new List<double[]>();
            var labels = new List<double[]>();
            var train_f = new List<double[]>();
            var train_l = new List<double[]>();
            var network = new NeuralNetwork(new SigmoidFunction(0.01), 4, new int[] { 30, 30, 30, 30, 3 });
            // 0.9 / 0.5 are presumably learning rate / momentum -- TODO confirm in BackPropagation.
            var teacher = new BackPropagation(network, 0.9, 0.5);

            #region Data parsing and splitting
            foreach (var entry in rows)
            {
                // Columns 0-3 are measurements, column 4 is the species name.
                var vals = entry.Split(new char[] { ',' }, StringSplitOptions.RemoveEmptyEntries);
                var feature = vals.Take(4).Select(s => double.Parse(s)).ToArray();
                features.Add(feature);
                switch (vals[4])
                {
                    case "Iris-setosa":
                        labels.Add(new double[] { 0, 0, 1 });
                        break;
                    case "Iris-versicolor":
                        labels.Add(new double[] { 0, 1, 0 });
                        break;
                    case "Iris-virginica":
                        labels.Add(new double[] { 1, 0, 0 });
                        break;
                    default:
                        // Unknown species: feature kept, no label added (lists desync) --
                        // NOTE(review): verify the data file never hits this branch.
                        break;
                }
            }

            // NOTE(review): this moves ALL `count` samples into the training lists in
            // random order -- it shuffles rather than splits; no held-out set remains.
            for (int i = 0; i < count; i++)
            {
                var idx = rnd.Next(0, features.Count);
                train_f.Add(features[idx]);
                features.RemoveAt(idx);
                train_l.Add(labels[idx]);
                labels.RemoveAt(idx);
            }
            #endregion

            double error = 0.0;
            ushort j = 0;

            // Train until the epoch error falls below the threshold; log every 10 epochs.
            do
            {
                error = teacher.RunEpoch(train_f.ToArray(), train_l.ToArray());
                if (j++ % 10 == 0)
                    Console.WriteLine(error);
            } while (error > threshold);

        }

        // Trains a 256->300->30->10 network on the Semeion handwritten-digit set.
        // Each row of Data/semeion.data holds 256 pixel values followed by a
        // 10-element one-hot digit label, space-separated.
        static void ClassifySemeion()
        {
            var epochs = 1000;
            var rnd = new Random();
            var rows = File.ReadAllLines("Data/semeion.data").Where(s => s.Length > 0).ToArray();
            var count = rows.Length;
            var features = new List<double[]>();
            var labels = new List<double[]>();
            var train_f = new List<double[]>();
            var train_l = new List<double[]>();
            var network = new NeuralNetwork(new SigmoidFunction(0.01), 256, new int[] { 300, 30, 10 });
            // 0.9 / 0.5 are presumably learning rate / momentum -- TODO confirm in BackPropagation.
            var teacher = new BackPropagation(network, 0.9, 0.5);

            #region Data parsing and splitting
            foreach (var entry in rows)
            {
                // First 256 values are pixels, the remaining 10 the one-hot label.
                var vals = entry.Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
                var feature = vals.Take(256).Select(s => double.Parse(s)).ToArray();
                features.Add(feature);
                var label = vals.Skip(256).Select(s => double.Parse(s)).ToArray();
                labels.Add(label);
            }

            // NOTE(review): this moves ALL `count` samples into the training lists in
            // random order -- it shuffles rather than splits; no held-out set remains.
            for (int i = 0; i < count; i++)
            {
                var idx = rnd.Next(0, features.Count);
                train_f.Add(features[idx]);
                features.RemoveAt(idx);
                train_l.Add(labels[idx]);
                labels.RemoveAt(idx);
            }
            #endregion



            for (int i = 0; i < epochs; i++)
            {
                var error = teacher.RunEpoch(train_f.ToArray(), train_l.ToArray());
                Console.WriteLine(error);
            }

        }

        // Trains a 3->40->30->1 network to classify numbers as odd (label n % 2)
        // from their decimal digits, timing each epoch, then answers console
        // queries until a negative number is entered.
        static void OddEvenDecimal()
        {
            var epochs = 10000;
            var data_length = 1000;
            var rnd = new Random();
            var nums = (from i in Enumerable.Range(0, data_length) select rnd.Next(0, 100)).ToArray();//, DecimalToBinary(i, 4))/*rnd.Next(0, 16)*/).ToArray();
            var data = new Tuple<double[][], double[][]>(nums.Select(s => DigitDecomposition(s, 3)).ToArray(), nums.Select(n => new double[] { n % 2 }).ToArray());


            var network = new NeuralNetwork(new SigmoidFunction(1), 3, new int[] { 40, 30, 1 });

            // 0.00001 is presumably the learning rate -- unusually small; TODO confirm intent.
            var teacher = new BackPropagation(network, 0.00001, 0.5);
            var error = 0.0;
            var j = 0;
            var chrono = new Stopwatch();
            for (int i = 0; i < epochs; i++)
            {
                error = teacher.RunEpoch(data.Item1, data.Item2);
                // j++ % 1 == 0 is always true: logs (and restarts the timer) every epoch.
                if (j++ % 1 == 0)
                {
                    chrono.Stop();
                    Console.WriteLine("epoch: " + (j - 1) + " error: " + error + " in: " + chrono.Elapsed.TotalSeconds);
                    chrono.Restart();
                }
            }


            // Interactive evaluation loop; a negative number exits.
            while (true)
            {
                Console.WriteLine("enter a number");
                try
                {
                    int num = int.Parse(Console.ReadLine());
                    if (num < 0)
                        break;
                    PP(network.Compute(DigitDecomposition(num, 3)));
                }
                catch (Exception)
                {
                    Console.WriteLine("Overflow pls...");
                }
            }
        }

        /// <summary>
        /// Counts the positions where the rounded values of <paramref name="a"/>
        /// and <paramref name="b"/> disagree (per-bit error between two 0/1 vectors).
        /// </summary>
        /// <param name="a">First vector (e.g. network output).</param>
        /// <param name="b">Second vector (e.g. expected label), same length as <paramref name="a"/>.</param>
        /// <returns>Number of mismatching rounded positions.</returns>
        /// <exception cref="ArgumentException">The arrays have different lengths.</exception>
        static int CheckResult(double[] a, double[] b)
        {
            // ArgumentException (instead of bare Exception) names the actual failure;
            // it still derives from Exception, so existing catch blocks are unaffected.
            if (a.Length != b.Length)
                throw new ArgumentException("arrays must have the same length");
            int error = 0;
            for (int i = 0; i < a.Length; i++)
            {
                error += (int)Math.Abs(Math.Round(a[i]) - Math.Round(b[i]));
            }
            return error;
        }

        // Trains a network to read back the number encoded in a generated QR code
        // (as a 10-bit binary label), checkpointing after every batch of epochs,
        // then iteratively prunes the trained network while the total bit error
        // over a fresh validation set does not increase.
        static void ClassifyQR()
        {
            var epochs = 300;
            Console.WriteLine("Insert num max");
            var max = int.Parse(Console.ReadLine());
            Console.WriteLine("insert sample count");
            var count = int.Parse(Console.ReadLine());
            // Fixed seed 231087 keeps the generated data set reproducible between runs.
            var data = QrGenerator.QrGenerator.CreateDataSet(count, max, 231087);
            var features = data.Select(t => t.Item2.Select(b => (double)b).ToArray()).ToArray();
            var labels = data.Select(t => DecimalToBinary((int)t.Item1, 10)).ToArray();
            var network = new NeuralNetwork(new SigmoidFunction(0.1), features[0].Length, new int[] { 50, 30, 10 });
            // 5 / 0.5 are presumably learning rate / momentum -- TODO confirm in BackPropagation.
            var teacher = new BackPropagation(network, 5, 0.5);
            int j = 1;
            var error = 0.0;
            do
            {
                for (int i = 0; i < epochs; i++)
                {
                    error = teacher.RunEpoch(features, labels);
                    Console.WriteLine("epoch " + i + " error " + error);
                }
                // The "(y/n)" prompts are informational only: the checkpoint is
                // saved and training continues unconditionally until error <= 5.
                Console.WriteLine("Done " + epochs + " iteration. Save? (y/n)");
                teacher.Save("QrCode_mid" + epochs * j);
                Console.WriteLine("Continue with other " + epochs + " epochs? (y/n)");
                j++;
            }
            while (error > 5);

            // Round-trip through disk to verify persistence of the trained network.
            network.Save("net");
            network = NeuralNetwork.Load("net");
            var old_prune_error = int.MaxValue;
            var prune_error = int.MaxValue;
            var prune_test = 1000;
            var prune_data = QrGenerator.QrGenerator.CreateDataSet(prune_test, max, 231087);
            var prune_features = prune_data.Select(t => t.Item2.Select(b => (double)b).ToArray()).ToArray();
            var prune_labels = prune_data.Select(t => DecimalToBinary((int)t.Item1, 10)).ToArray();
            var prune = new Pruning(network, features);
            do
            {
                Console.WriteLine(network);
                Console.WriteLine("continue pruning?");
                // Snapshot so a pruning step that worsens the error could be rolled back.
                network.Backup();
                if (prune.CanPrune())
                    prune.Prune();
                else
                    break;
                old_prune_error = prune_error;
                prune_error = 0;
                // BUGFIX: accumulate the bit error over the whole validation set
                // (was overwritten each iteration, so only the last sample counted)
                // and iterate over the validation arrays, not the training set length.
                for (int i = 0; i < prune_features.Length; i++)
                {
                    var res = network.Compute(prune_features[i]);
                    prune_error += CheckResult(res, prune_labels[i]);
                }
                Console.WriteLine("pruned error = " + prune_error);
            } while (prune_error <= old_prune_error);

            Console.WriteLine("Learning finished. Save? (y/n)");
            if (Console.ReadLine() != "n")
            {
                network.Save("QrCode_final_300_100" + epochs * j);
            }
        }

        // Loads a previously trained QR-classification network (falling back to a
        // mid-training checkpoint if the final save is missing), prunes it against
        // a freshly generated validation set, then lets the user query it with the
        // QR code of any entered number.
        // BUGFIX: the original did not compile (`var features;` without initializer,
        // a dangling `features = teacher.` assignment, `new Pruning(network, )` with
        // a missing argument, and an undefined `max`); reconstructed to use the
        // validation features for pruning and to prompt for `max`.
        static void LoadQR()
        {
            NeuralNetwork network;
            try
            {
                network = NeuralNetwork.Load("aQrCode_final_300_1004000");
            }
            catch (Exception)
            {
                // Fall back to the latest training checkpoint.
                var teacher = BackPropagation.Load("QrCode_mid9900");
                Console.WriteLine(teacher.LastEpochError);
                network = teacher.Network;
            }

            // `max` is the numeric range the saved network was trained on -- must
            // match the value used in ClassifyQR for the validation set to be fair.
            Console.WriteLine("Insert num max");
            var max = int.Parse(Console.ReadLine());

            var old_prune_error = int.MaxValue;
            var prune_error = int.MaxValue;
            var prune_test = 1000;
            // Fixed seed 231087 keeps the generated data set reproducible between runs.
            var prune_data = QrGenerator.QrGenerator.CreateDataSet(prune_test, max, 231087);
            var prune_features = prune_data.Select(t => t.Item2.Select(b => (double)b).ToArray()).ToArray();
            var prune_labels = prune_data.Select(t => DecimalToBinary((int)t.Item1, 10)).ToArray();
            var prune = new Pruning(network, prune_features);
            do
            {
                Console.WriteLine(network);
                Console.WriteLine("continue pruning?");
                // Snapshot so a pruning step that worsens the error could be rolled back.
                network.Backup();
                if (prune.CanPrune())
                    prune.Prune();
                else
                    break;
                old_prune_error = prune_error;
                prune_error = 0;
                // Total bit error over the validation set after this prune step.
                for (int i = 0; i < prune_features.Length; i++)
                {
                    var res = network.Compute(prune_features[i]);
                    prune_error += CheckResult(res, prune_labels[i]);
                }
                Console.WriteLine("pruned error = " + prune_error);
            } while (prune_error <= old_prune_error);

            // Print the sorted values present in a sample data set, for reference.
            var data = QrGenerator.QrGenerator.CreateDataSet(100, 100, 231087);
            var ord = data.OrderBy(s => s.Item1).ToArray();
            PP(ord.Select(s => (double)s.Item1).ToArray());

            // Interactive loop: encode the entered number as a QR code and compare
            // the expected 10-bit label with the network's prediction.
            while (true)
            {
                Console.WriteLine("Enter a number");
                int input = int.Parse(Console.ReadLine());
                var qr = QrGenerator.QrGenerator.GetQrCode(input + "");
                var array = qr.Matrix.Array.ToArray().Select(d => (double)d).ToArray();
                var res = network.Compute(array);
                Console.WriteLine("was:");
                PP(DecimalToBinary(input, 10));
                Console.WriteLine("Net says");
                PP(res);
            }
        }

        //static void XorTest()
        //{
        //    var epochs = 1000;
        //    var data_length = 4;
        //    var rnd = new Random();
        //    var nums = (from i in Enumerable.Range(0, data_length) select rnd.Next(0, 100)).ToArray();//, DecimalToBinary(i, 4))/*rnd.Next(0, 16)*/).ToArray();
        //    var data = new Tuple<double[][], double[][]>(nums.Select(s => DigitDecomposition(s, 3)).ToArray(), nums.Select(n => new double[] { n % 2 }).ToArray());


        //    var network = new NeuralNetwork(new SigmoidFunction(1), 2, new int[] { 2, 2, 1 });

        //    var teacher = new BackPropagation(network, 0.9, 0.5);
        //    var error = 0.0;
        //    var j = 0;
        //    var chrono = new Stopwatch();
        //    for (int i = 0; i < epochs; i++)
        //    {
        //        error = teacher.RunEpoch(data.Item1, data.Item2);
        //        if (j++ % 100 == 0)
        //        {
        //            chrono.Stop();
        //            Console.WriteLine("epoch: " + (j - 1) + " error: " + error + " in: " + chrono.Elapsed.TotalSeconds);
        //            chrono.Restart();
        //        }
        //    }


        //    while (true)
        //    {
        //        Console.WriteLine("enter a number");
        //        try
        //        {
        //            int num = int.Parse(Console.ReadLine());
        //            if (num < 0)
        //                break;
        //            PP(network.Compute(DigitDecomposition(num, 3)));
        //        }
        //        catch (Exception)
        //        {
        //            Console.WriteLine("Overflow pls...");
        //        }
        //    }
        //}

        /// <summary>
        /// Entry point. Exactly one experiment is enabled at a time; swap the
        /// active call to run a different one.
        /// </summary>
        static void Main(string[] args)
        {
            // Available experiments (uncomment one to run instead):
            //Pruning.Solve();
            //ConversionTest();
            //OddEvenBinary();
            //OddEvenDecimal();
            //ClassifyIris();
            //ClassifySemeion();
            //ClassifyQR();
            //LoadQR();
            //MioOddEvenBinary();
            //SimmetryProblem();

            ClassifyQR();
        }
    }
}
