﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using DigitsRecognizer.NeuralNetwork;

namespace NeuralNetwork
{
    public class NeuralNetwork<T>
    {
        // current input signals, converted to double (and normalized for double-typed networks)
        private List<double> InputLayer;
        private int NumberOfInputs;
        // hidden layers: outer list = layer index, inner list = neurons of that layer
        private List<List<Neuron>> HiddenLayer;
        private List<Neuron> OutputLayer;
        private double Z;                           // expected output value
        private double TrainingFactor;              // training factor (learning rate)
        private int Epoch;                          // maximum number of epochs
        private double TotalDefect;                 // total network error
        private double EdgeValue = 0.0001;         // threshold value (maximum acceptable error)
        private Random Random;

        // hidden layer
        private List<int> ConfigurationHL;          // neuron count per hidden layer
        private int NumberOfLayersInHL;

        // output layer
        private int NumberOfNeuronsInOL = 10;        // 10 classes for 10 digits

        // persists the network configuration
        private Writer<T> Writer;

        // reads the stored network configuration
        private Reader<T> Reader;
        private List<double> ExpectedValuesPattern;
        private List<int> NumberOfInputsPattern;
        private List<List<List<Neuron>>> HiddenLayerPattern;
        private List<List<Neuron>> OutputLayerPattern;
        // per-pattern error from the last EstimateOutput run, keyed by expected value Z
        private Dictionary<double, double> EstimatedDigitValuesDic;

        // raw parameters loaded from file
        private List<T[]> LoadedParams;
        // training samples grouped by label key
        private Dictionary<T, List<T[]>> TrainingDictionary;

        /// <summary>
        /// Initializes every collection to an empty instance and picks the parameter
        /// reader matching the generic payload type (double or byte).
        /// </summary>
        public NeuralNetwork()
        {
            Random = new Random((int)DateTime.Now.Ticks);
            InputLayer = new List<double>();
            HiddenLayer = new List<List<Neuron>>();
            OutputLayer = new List<Neuron>();
            ConfigurationHL = new List<int>();
            ExpectedValuesPattern = new List<double>();
            NumberOfInputsPattern = new List<int>();
            HiddenLayerPattern = new List<List<List<Neuron>>>();
            OutputLayerPattern = new List<List<Neuron>>();
            EstimatedDigitValuesDic = new Dictionary<double, double>();
            LoadedParams = new List<T[]>();
            TrainingDictionary = new Dictionary<T, List<T[]>>();
            Writer = new Writer<T>();
            if (typeof(T) == typeof(double))
            {
                Reader = new ReaderDoubleParams() as Reader<T>;
            }
            else
            {
                Reader = new ReaderByteParams() as Reader<T>;
            }
        }

        /// <summary>
        /// Creates a supervised, feed-forward, linear artificial neural network.
        /// </summary>
        /// <param name="data">input data; an array of double or byte is expected</param>
        /// <param name="configurationHL">hidden-layer configuration; each array element is the neuron count of one hidden layer,
        /// where the element index is the layer index, e.g. int[] tab = int[]{3,2} means 2 hidden layers, the 0th with 3 neurons
        /// and the 1st with 2 neurons; passing null builds the configuration automatically from the input/output counts.</param>
        /// <param name="numberOfNeuronsInOL">number of neurons in the output layer; usually set to the number of classes
        /// the network is meant to distinguish</param>
        public NeuralNetwork(T[] data, int[] configurationHL, int numberOfNeuronsInOL)
            : this()
        {
            // Order matters: the input layer sizes the hidden layers, which in turn
            // size the output layer; weights are randomized last.
            FillInputLayer(data);
            CreateHiddenLayer(data, configurationHL, numberOfNeuronsInOL);
            CreateOutputLayer(numberOfNeuronsInOL);
            FillNeuronsWeights();
        }

        /// <summary>
        /// Copies <paramref name="data"/> into the input layer as doubles and, for
        /// double-typed networks, normalizes the values afterwards.
        /// </summary>
        private void FillInputLayer(T[] data)
        {
            NumberOfInputs = data.Length;
            InputLayer.Clear();
            InputLayer.AddRange(data.Select(value => Convert.ToDouble(value)));

            if (typeof(T) == typeof(double))
            {
                Normalize();
            }
        }

        /// <summary>
        /// Scales the input layer in place as the relative deviation from the mean,
        /// x -> (x - mean) / mean, leaving zero-valued inputs at zero.
        /// </summary>
        private void Normalize() 
        {
            // skalowanie względem odchylenia od wartości średniej
            // (scaling by the deviation from the average value)
            double averageVal = InputLayer.Average();

            // A zero mean would turn every non-zero input into +/-Infinity, since
            // double division by zero does not throw in C#; leave the data unscaled.
            if (averageVal == 0)
            {
                return;
            }

            for (int i = 0; i < NumberOfInputs; i++)
            {
                InputLayer[i] = InputLayer[i] != 0 ? (InputLayer[i] - averageVal) / averageVal : 0;
            }
        }

        /// <summary>
        /// Builds the hidden part of the network. When no explicit configuration is
        /// supplied, a single hidden layer is created whose neuron count is the
        /// geometric mean of the input and output sizes.
        /// </summary>
        private void CreateHiddenLayer(T[] data, int[] configurationHL, int numberOfNeuronsInOL) 
        {
            HiddenLayer.Clear();
            ConfigurationHL.Clear();

            if (configurationHL == null)
            {
                // Auto-configure: one hidden layer sized by the geometric mean of
                // the input and output counts.
                configurationHL = new int[] { (int)Math.Sqrt(data.Length * numberOfNeuronsInOL) };
            }
            NumberOfLayersInHL = configurationHL.Length;

            for (int layer = 0; layer < NumberOfLayersInHL; layer++)
            {
                // The first hidden layer is fed by every network input; every deeper
                // layer is fed by the neurons of the layer before it.
                int inputsPerNeuron = layer == 0 ? NumberOfInputs : HiddenLayer[layer - 1].Count;

                List<Neuron> neurons = new List<Neuron>();
                for (int n = 0; n < configurationHL[layer]; n++)
                {
                    Neuron neuron = new Neuron();
                    neuron.NumberOfInputSignals = inputsPerNeuron;
                    neurons.Add(neuron);
                }
                HiddenLayer.Add(neurons);
                ConfigurationHL.Add(neurons.Count);
            }
        }

        /// <summary>
        /// Builds the output layer; each output neuron receives one input signal per
        /// neuron of the last hidden layer.
        /// </summary>
        private void CreateOutputLayer(int configurationOL) 
        {
            NumberOfNeuronsInOL = configurationOL;
            OutputLayer.Clear();

            int inputsPerNeuron = HiddenLayer.Last().Count;
            for (int i = 0; i < NumberOfNeuronsInOL; i++)
            {
                OutputLayer.Add(new Neuron { NumberOfInputSignals = inputsPerNeuron });
            }
        }

        /// <summary>
        /// Assigns fresh random synapse weights to every neuron in the hidden and
        /// output layers.
        /// </summary>
        private void FillNeuronsWeights() 
        {
            // Reuse the instance-level Random instead of constructing a new
            // time-seeded one per call: two calls within the same clock tick would
            // otherwise produce identical weight sequences.
            FillNeuronsWeightsForHL(Random);
            FillNeuronsWeightsForOL(Random);
        }

        /// <summary>
        /// Creates a dendrit with a random weight for every input of every
        /// hidden-layer neuron.
        /// </summary>
        private void FillNeuronsWeightsForHL(Random random) 
        {
            for (int layer = 0; layer < NumberOfLayersInHL; layer++)
            {
                for (int n = 0; n < ConfigurationHL[layer]; n++)
                {
                    Neuron neuron = HiddenLayer[layer][n];
                    for (int input = 0; input < neuron.NumberOfInputSignals; input++)
                    {
                        neuron[input] = new Dendrit(random.NextDouble());
                    }
                }
            }
        }

        /// <summary>
        /// Creates a dendrit with a random weight for every input of every
        /// output-layer neuron.
        /// </summary>
        private void FillNeuronsWeightsForOL(Random random)
        {
            for (int i = 0; i < NumberOfNeuronsInOL; i++)
            {
                Neuron neuron = OutputLayer[i];
                for (int input = 0; input < neuron.NumberOfInputSignals; input++)
                {
                    neuron[input] = new Dendrit(random.NextDouble());
                }
            }
        }

        /// <summary>
        /// Trains the neural network with the back-propagation algorithm.
        /// </summary>
        /// <param name="n">the training factor (learning rate); should be as small as possible while greater than 0, e.g. 0.0001</param>
        /// <param name="epoch">optional number of epochs (one epoch = one full weight update of every neuron);
        /// when supplied, the algorithm runs for exactly that many epochs</param>
        /// <param name="edgeValue">threshold value; the maximum acceptable error while training the network</param>
        /// <param name="login">the user's login</param>
        public void Train(/*double z,*/ double n, int? epoch, double edgeValue, string login)
        {
            // load the saved training data from file
            Reader.ReadParams(ref LoadedParams, login);
            CreateTrainingDictionary();
            DataProcessing();
            Reader.Clear(login);

            // training (one stored pattern is produced) for each key
            foreach (var key in TrainingDictionary.Keys) 
            {
                // rebuild the whole network around the first training instance of this key
                FillInputLayer(TrainingDictionary[key].First());
                CreateHiddenLayer(TrainingDictionary[key].First(), null, NumberOfNeuronsInOL);
                CreateOutputLayer(NumberOfNeuronsInOL);
                FillNeuronsWeights();
                FillConstans(Convert.ToDouble(key), n, epoch, edgeValue);
                
                ComputeAxonsForHL();
                ComputeAxonsForOL();
                TotalDefect = ComputeTotalDefect();
                double totalDefectAfterChange = TotalDefect;
                bool stopTraining = false;
                int numOfTrainingInstances = TrainingDictionary[key].Count;
                int iterations = 0;
                int randomIndex = 0;
                if (Epoch > 0)
                {
                    // fixed-epoch mode: run exactly Epoch back-propagation passes,
                    // switching to the next training instance at a regular interval
                    int counter = Epoch;
                    int numOfIterations = counter / numOfTrainingInstances;
                    int index = 0;
                    // NOTE(review): when Epoch < numOfTrainingInstances, numOfIterations
                    // is 0 and "counter % numOfIterations" throws DivideByZeroException
                    // — confirm Epoch is always at least the number of instances.
                    while (counter >= 0)
                    {
                        BackPropagation();
                        if (counter % numOfIterations == 0 && counter > 0) 
                        {
                            FillInputLayer(TrainingDictionary[key][index]);
                            ComputeAxonsForHL();
                            ComputeAxonsForOL();
                            index++;
                        }
                        counter--;
                    }
                }
                else
                {
                    // threshold mode: back-propagate while the total defect keeps
                    // decreasing and stays above the edge value, then validate the
                    // result against every training instance
                    while (!stopTraining)
                    {
                        do
                        {
                            TotalDefect = totalDefectAfterChange;
                            BackPropagation();
                            totalDefectAfterChange = ComputeTotalDefect();
                            iterations++;
                        } while (totalDefectAfterChange < TotalDefect && totalDefectAfterChange > EdgeValue);

                        for (int i = 0; i < numOfTrainingInstances; i++)
                        {
                            FillInputLayer(TrainingDictionary[key][i]);
                            ComputeAxonsForHL();
                            ComputeAxonsForOL();
                            totalDefectAfterChange = ComputeTotalDefect();
                            //stopTraining = (totalDefectAfterChange - EdgeValue) <= TotalDefect;
                            stopTraining = totalDefectAfterChange <= TotalDefect;
                            if (!stopTraining) 
                            {
                                // draw the next instance to be corrected at random
                                if (numOfTrainingInstances > 2)
                                {
                                    randomIndex = Random.Next(0, numOfTrainingInstances);
                                    FillInputLayer(TrainingDictionary[key][randomIndex]);
                                    ComputeAxonsForHL();
                                    ComputeAxonsForOL();
                                    totalDefectAfterChange = ComputeTotalDefect();
                                }
                                break;
                            }
                        }
                    }
                }
                // persist the trained pattern for this key
                Writer.SavaNeuralNetworkParams(Z, NumberOfInputs, HiddenLayer, OutputLayer, login, WriteReadDestination.OneFile);
            }
        }

        /// <summary>
        /// Stores the training constants for the current pattern.
        /// </summary>
        protected void FillConstans(double z, double n, int? epoch, double edgeValue) 
        {
            // Shift the decimal point: axon values lie in the range 0.0 - 0.99.
            Z = z / 10;
            TrainingFactor = n;
            Epoch = epoch ?? 0;
            EdgeValue = edgeValue;
        }

        /// <summary>
        /// Propagates signals forward through the hidden layers, recomputing every
        /// hidden neuron's axon value.
        /// </summary>
        private void ComputeAxonsForHL() 
        {
            for (int layer = 0; layer < NumberOfLayersInHL; layer++)
            {
                // The first layer reads the network inputs; deeper layers read the
                // axons of the previous hidden layer.
                bool firstLayer = layer == 0;
                for (int n = 0; n < ConfigurationHL[layer]; n++)
                {
                    Neuron neuron = HiddenLayer[layer][n];
                    for (int input = 0; input < neuron.NumberOfInputSignals; input++)
                    {
                        neuron[input].InputSignal = firstLayer
                            ? InputLayer[input]
                            : HiddenLayer[layer - 1][input].Axon;
                    }
                    neuron.CountAxon(ActivationFunction.Signum);
                }
            }
        }

        /// <summary>
        /// Propagates signals from the last hidden layer through the output layer,
        /// recomputing every output neuron's axon value.
        /// </summary>
        private void ComputeAxonsForOL()
        {
            List<Neuron> lastHiddenLayer = HiddenLayer.Last();
            for (int i = 0; i < NumberOfNeuronsInOL; i++)
            {
                Neuron neuron = OutputLayer[i];
                for (int input = 0; input < neuron.NumberOfInputSignals; input++)
                {
                    neuron[input].InputSignal = lastHiddenLayer[input].Axon;
                }
                neuron.CountAxon(ActivationFunction.Signum);
            }
        }

        /// <summary>
        /// Performs one back-propagation pass: computes the error terms from the
        /// output layer backwards through the hidden layers, updates all weights,
        /// then recomputes the axons with the new weights.
        /// </summary>
        private void BackPropagation() 
        {
            CalculateDefectInOL();
            CalculateDefectInHL();
            UpdateWeights();
            ComputeAxonsForHL();
            ComputeAxonsForOL();
        }

        /// <summary>
        /// Computes the error term of each output neuron from the distance between
        /// its axon value and the expected output Z.
        /// </summary>
        private void CalculateDefectInOL() 
        {
            for (int i = 0; i < NumberOfNeuronsInOL; i++)
            {
                Neuron neuron = OutputLayer[i];
                // NOTE(review): a*(1-a) is the logistic-sigmoid derivative, yet the
                // axons are computed with ActivationFunction.Signum — confirm the
                // activation and its derivative are meant to match.
                double derivative = neuron.Axon * (1 - neuron.Axon);
                neuron.Defect = (neuron.Axon - Z) * derivative;
            }
        }

        /// <summary>
        /// Back-propagates error terms through the hidden layers, from the layer
        /// feeding the output layer down to the first hidden layer.
        /// </summary>
        private void CalculateDefectInHL()
        {
            for (int layer = NumberOfLayersInHL - 1; layer >= 0; layer--)
            {
                bool feedsOutputLayer = layer == NumberOfLayersInHL - 1;
                for (int n = 0; n < ConfigurationHL[layer]; n++)
                {
                    Neuron neuron = HiddenLayer[layer][n];
                    double derivative = neuron.Axon * (1 - neuron.Axon);  // derivative of the axon

                    // Weighted sum of the error terms of every neuron this one feeds.
                    double upperLayerDefects = 0;
                    if (feedsOutputLayer)
                    {
                        for (int k = 0; k < NumberOfNeuronsInOL; k++)
                        {
                            upperLayerDefects += OutputLayer[k].Defect * OutputLayer[k][n].SynapseWeight;
                        }
                    }
                    else
                    {
                        for (int k = 0; k < HiddenLayer[layer + 1].Count; k++)
                        {
                            upperLayerDefects += HiddenLayer[layer + 1][k].Defect * HiddenLayer[layer + 1][k][n].SynapseWeight;
                        }
                    }
                    neuron.Defect = upperLayerDefects * derivative;
                }
            }
        }

        /// <summary>
        /// Applies the gradient step to the synapse weights of the hidden layers and
        /// then the output layer.
        /// </summary>
        private void UpdateWeights() 
        {
            UpdateWeightsForHL();
            UpdateWeightsForOL();
        }

        /// <summary>
        /// Applies the gradient step (learning rate * defect * input signal) to every
        /// hidden-layer synapse weight.
        /// </summary>
        private void UpdateWeightsForHL()
        {
            for (int layer = 0; layer < NumberOfLayersInHL; layer++)
            {
                for (int n = 0; n < ConfigurationHL[layer]; n++)
                {
                    Neuron neuron = HiddenLayer[layer][n];
                    for (int input = 0; input < neuron.NumberOfInputSignals; input++)
                    {
                        neuron[input].SynapseWeight -= TrainingFactor * neuron.Defect * neuron[input].InputSignal;
                    }
                }
            }
        }

        /// <summary>
        /// Applies the gradient step (learning rate * defect * input signal) to every
        /// output-layer synapse weight.
        /// </summary>
        private void UpdateWeightsForOL()
        {
            for (int i = 0; i < NumberOfNeuronsInOL; i++)
            {
                Neuron neuron = OutputLayer[i];
                for (int input = 0; input < neuron.NumberOfInputSignals; input++)
                {
                    neuron[input].SynapseWeight -= TrainingFactor * neuron.Defect * neuron[input].InputSignal;
                }
            }
        }

        /// <summary>
        /// Returns half the sum of squared differences between each output axon and
        /// the expected value Z (the standard quadratic network error).
        /// </summary>
        private double ComputeTotalDefect() 
        {
            double defect = 0;
            for (int i = 0; i < NumberOfNeuronsInOL; i++)
            {
                // 2.0 (double) instead of the stray 2.0f float literal; Math.Pow
                // takes doubles anyway.
                defect += Math.Pow(OutputLayer[i].Axon - Z, 2.0);
            }
            // Plain multiplication: "return defect *= 0.5" pointlessly wrote back
            // into a local about to go out of scope.
            return defect * 0.5;
        }

        /// <summary>
        /// Groups the loaded parameters into a dictionary keyed by label. LoadedParams
        /// is expected to alternate label entries (arrays of length 1) with data
        /// entries; every data entry following a matching label is collected.
        /// </summary>
        private void CreateTrainingDictionary() 
        {
            TrainingDictionary.Clear();
            var keys = (from i in LoadedParams
                        where i.Length == 1
                        select i.First()).Distinct();

            foreach (var key in keys) 
            {
                List<T[]> values = new List<T[]>();
                // Step of 2 because entries alternate label/data; the "i + 1" bound
                // guards against a trailing label with no data entry after it, which
                // previously threw ArgumentOutOfRangeException.
                for (int i = 0; i + 1 < LoadedParams.Count; i += 2)
                {
                    if (LoadedParams[i].First().Equals(key)) 
                    {
                        values.Add(LoadedParams[i + 1]);
                    }
                }
                TrainingDictionary.Add(key, values);
            }            
        }

        /// <summary>
        /// Trims all training samples of each key to the shortest sample length so
        /// every sample ends up with the same number of parameters.
        /// </summary>
        private void DataProcessing() 
        { 
            foreach (var key in TrainingDictionary.Keys)
            {
                List<T[]> samples = TrainingDictionary[key];
                int minLength = samples.Min(sample => sample.Length);
                for (int i = 0; i < samples.Count; i++)
                {
                    if (samples[i].Length > minLength)
                    {
                        samples[i] = samples[i].Take(minLength).ToArray();
                    }
                }
            }
        }

        /// <summary>
        /// Runs <paramref name="data"/> through every stored pattern and returns the
        /// detected command (pattern key * 10), or -10 when no pattern's error falls
        /// below <paramref name="range"/>.
        /// </summary>
        /// <param name="data">input data to classify</param>
        /// <param name="login">the user's login, used to locate the stored patterns</param>
        /// <param name="range">maximum acceptable error for a pattern to qualify</param>
        public double EstimateOutput(T[] data, string login, double range) 
        {
            EstimatedDigitValuesDic.Clear();
            // the stored patterns are loaded lazily, once, on the first call
            if (!ExpectedValuesPattern.Any())
            {
                Reader.ReadNeuralNetworkParams(ref ExpectedValuesPattern, ref NumberOfInputsPattern, ref HiddenLayerPattern, ref OutputLayerPattern, login, WriteReadDestination.OneFile);
            }

            for (int i = 0; i < ExpectedValuesPattern.Count; i++) 
            {
                // refill the input layer for each pattern: SetBeginnigParams then
                // overwrites NumberOfInputs with the pattern's own input count
                FillInputLayer(data);
                SetBeginnigParams(HiddenLayerPattern[i], OutputLayerPattern[i], NumberOfInputsPattern[i], ExpectedValuesPattern[i]);
                ComputeAxonsForHL();
                ComputeAxonsForOL();
                EstimateDigit();
            }
            return DetectCommand(range);
        }

        /// <summary>
        /// Loads one stored pattern into the network: its expected output value,
        /// input count, hidden layers and output layer.
        /// </summary>
        private void SetBeginnigParams(List<List<Neuron>> hl, List<Neuron> ol, int numOfinputs, double z) 
        {
            NumberOfInputs = numOfinputs;
            Z = z;
            // ConfigureHL must run first: ConfigureOL reads the last hidden layer's
            // neuron count to size the output neurons' inputs.
            ConfigureHL(hl);
            ConfigureOL(ol);
        }

        /// <summary>
        /// Records how well the current pattern matches the input: stores the
        /// network's total error under the pattern's expected value Z (lower error
        /// means a better match).
        /// </summary>
        private void EstimateDigit() 
        {
            // The previous version also averaged the output axons here, but that
            // value was immediately overwritten by ComputeTotalDefect(); the dead
            // computation has been removed.
            double result = ComputeTotalDefect();
            EstimatedDigitValuesDic.Add(Z, result);
        }

        /// <summary>
        /// Returns the best-matching pattern key multiplied by 10 — the candidate
        /// with the smallest stored error below <paramref name="range"/> — or -10
        /// when no candidate qualifies.
        /// </summary>
        private double DetectCommand(double range) 
        {
            double bestError = range;
            double bestKey = -1;
            foreach (KeyValuePair<double, double> candidate in EstimatedDigitValuesDic)
            {
                if (candidate.Value < bestError)
                {
                    bestError = candidate.Value;
                    bestKey = candidate.Key;
                }
            }
            return bestKey * 10;
        }

        /// <summary>
        /// Replaces the hidden layers with a stored pattern and rebuilds the
        /// per-layer neuron counts and each neuron's input-signal count.
        /// </summary>
        private void ConfigureHL(List<List<Neuron>> hl) 
        {
            HiddenLayer.Clear();
            HiddenLayer.AddRange(hl);
            NumberOfLayersInHL = hl.Count;

            ConfigurationHL = new List<int>();
            for (int layer = 0; layer < NumberOfLayersInHL; layer++)
            {
                ConfigurationHL.Add(hl[layer].Count);

                // The first hidden layer is fed by every network input; deeper
                // layers by the neurons of the previous hidden layer.
                int inputsPerNeuron = layer == 0 ? NumberOfInputs : HiddenLayer[layer - 1].Count;
                foreach (Neuron neuron in HiddenLayer[layer])
                {
                    neuron.NumberOfInputSignals = inputsPerNeuron;
                }
            }
        }

        /// <summary>
        /// Replaces the output layer with a stored pattern and rebuilds each output
        /// neuron's input-signal count from the last hidden layer.
        /// </summary>
        private void ConfigureOL(List<Neuron> ol)
        {
            OutputLayer.Clear();
            OutputLayer.AddRange(ol);
            NumberOfNeuronsInOL = ol.Count;

            int inputsPerNeuron = HiddenLayer.Last().Count;
            foreach (Neuron neuron in OutputLayer)
            {
                neuron.NumberOfInputSignals = inputsPerNeuron;
            }
        }
    }
}
