﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace FlashCards.NeuralNetworks
{
    /// <summary>
    /// Fully connected feed-forward neural network trained with backpropagation.
    /// Layer 0 is the input layer (one <see cref="ForwardingNeuron"/> per flashcard,
    /// with a trailing <see cref="BiasNeuron"/>); layers 1..LayersNumber-1 are hidden
    /// layers (each also ending with a bias); layer LayersNumber is the output layer.
    /// </summary>
    public class NeuralNetwork
    {
        public List<List<Neuron>> NeuralLayers { get; set; }
        public List<int> LayersIds { get; set; }
        public int OutputsNumber { get; set; }
        public int LayersNumber { get; set; }
        public int InputsNumber { get; set; }
        public double LearningRatio { get; set; }

        public int FlashcardSetId { get; set; }
        public int? NetworkId { get; set; }

        // Fraction of the previous weight delta carried over as momentum during training.
        private const double MomentumRatio = 0.5;

        /// <summary>
        /// Restores a persisted network shell. <see cref="NeuralLayers"/> starts empty
        /// and is expected to be rebuilt by the caller from <paramref name="layersIds"/>.
        /// </summary>
        public NeuralNetwork(int networkId, List<int> layersIds)
        {
            LearningRatio = 0.4;
            NeuralLayers = new List<List<Neuron>>();
            NetworkId = networkId;
            LayersIds = layersIds;
        }

        /// <summary>
        /// Builds a new fully connected network.
        /// </summary>
        /// <param name="layersNumber">Number of non-input layers (hidden layers plus the output layer).</param>
        /// <param name="inputsNumber">Number of input neurons (one per flashcard).</param>
        /// <param name="outputsNumber">Number of output neurons.</param>
        /// <param name="flashcardSetId">Id of the flashcard set this network belongs to.</param>
        /// <param name="flashcardsIds">Flashcard ids mapped, in order, onto the input neurons.</param>
        public NeuralNetwork(int layersNumber, int inputsNumber, int outputsNumber, int flashcardSetId, List<int> flashcardsIds)
        {
            LearningRatio = 0.4;
            LayersNumber = layersNumber;
            OutputsNumber = outputsNumber;
            InputsNumber = inputsNumber;
            NeuralLayers = new List<List<Neuron>>();
            NeuralLayers.Add(new List<Neuron>()); // input layer

            FlashcardSetId = flashcardSetId;

            for (int i = 0; i < layersNumber; i++)
            {
                NeuralLayers.Add(new List<Neuron>());
            }

            // Input layer: one forwarding neuron per flashcard, bias kept last.
            for (int i = 0; i < inputsNumber; i++)
            {
                NeuralLayers[0].Add(new ForwardingNeuron(flashcardsIds[i], Guid.NewGuid()));
            }

            NeuralLayers[0].Add(new BiasNeuron());

            // Rule-of-thumb hidden layer size: geometric mean of inputs and outputs.
            int hiddenNeuronsNumber = (int)Math.Sqrt(inputsNumber * outputsNumber);

            // Hidden layers, each neuron fully connected to the previous layer; bias last.
            for (int i = 1; i < layersNumber; i++)
            {
                for (int z = 0; z < hiddenNeuronsNumber; z++)
                {
                    // The +1 accounts for the previous layer's bias neuron.
                    var neuron = new Neuron(i == 1 ? inputsNumber + 1 : hiddenNeuronsNumber + 1);

                    for (int j = 0; j < NeuralLayers[i - 1].Count; j++)
                    {
                        neuron.Inputs.Add(NeuralLayers[i - 1][j]);
                    }

                    NeuralLayers[i].Add(neuron);
                }

                NeuralLayers[i].Add(new BiasNeuron());
            }

            // Output layer: fully connected to the last hidden layer; no bias appended,
            // so NeuralLayers[LayersNumber].Count == OutputsNumber.
            for (int i = 0; i < outputsNumber; i++)
            {
                var neuron = new Neuron(NeuralLayers[LayersNumber - 1].Count);

                for (int j = 0; j < NeuralLayers[LayersNumber - 1].Count; j++)
                {
                    neuron.Inputs.Add(NeuralLayers[LayersNumber - 1][j]);
                }

                NeuralLayers[LayersNumber].Add(neuron);
            }
        }

        /// <summary>
        /// Grows the input layer by one forwarding neuron for a newly added flashcard
        /// and wires it into every neuron of the first hidden layer with weight 1.
        /// </summary>
        public void AddInput(int flashcardId)
        {
            InputsNumber++;

            var newNeuron = new ForwardingNeuron(flashcardId, Guid.NewGuid());
            // Keep the BiasNeuron as the last element of the input layer.
            NeuralLayers[0].Insert(NeuralLayers[0].Count - 1, newNeuron);

            foreach (var item in NeuralLayers[1])
            {
                // The hidden layer's own bias takes no inputs from layer 0.
                if (item is BiasNeuron)
                    continue;

                // BUG FIX: insert before the trailing bias input instead of appending.
                // BackPropagate indexes Weights[j] by the previous layer's neuron
                // position, so Inputs/Weights order must mirror the layer-0 order;
                // the original Add() calls skewed that alignment after the first AddInput.
                int position = Math.Max(item.Inputs.Count - 1, 0);
                item.Inputs.Insert(position, newNeuron);
                item.Weights.Insert(position, 1);
                item.PrevWeights.Insert(position, 1);
            }
        }

        /// <summary>
        /// Feeds values keyed by flashcard id into the matching input neurons.
        /// Ids with no matching neuron are ignored. (BUG FIX: the original kept a null
        /// entry for the bias neuron via "as ForwardingNeuron" and threw a
        /// NullReferenceException when an unknown id was looked up.)
        /// </summary>
        private void LoadInputs(Dictionary<int, double> input)
        {
            var inputNeurons = NeuralLayers[0].OfType<ForwardingNeuron>().ToList();

            foreach (var pair in input)
            {
                var neuron = inputNeurons.FirstOrDefault(x => x.FlashcardId == pair.Key);
                if (neuron != null)
                {
                    neuron.InputValue = pair.Value;
                }
            }
        }

        /// <summary>Feeds raw input values into the forwarding neurons by position.</summary>
        private void LoadInputs(double[] input)
        {
            for (int i = 0; i < input.Length; i++)
            {
                var forwardingNeuron = NeuralLayers[0][i] as ForwardingNeuron;
                if (forwardingNeuron != null)
                {
                    forwardingNeuron.InputValue = input[i];
                }
            }
        }

        /// <summary>Runs the forward pass and returns the output layer activations.</summary>
        private double[] ComputeOutputs()
        {
            var result = new double[OutputsNumber];
            var outputLayer = NeuralLayers[LayersNumber];

            for (int i = 0; i < OutputsNumber; i++)
            {
                result[i] = outputLayer[i].CalculateActivationFunction();
            }

            return result;
        }

        /// <summary>
        /// Runs the forward pass and sets each output neuron's error by the delta rule:
        /// (target - actual) * f'(net).
        /// </summary>
        private double[] ComputeOutputsWithErrors(double[] expectedResult)
        {
            var result = new double[OutputsNumber];
            var outputLayer = NeuralLayers[LayersNumber];

            for (int i = 0; i < OutputsNumber; i++)
            {
                result[i] = outputLayer[i].CalculateActivationFunction();
                outputLayer[i].Error = (expectedResult[i] - result[i]) * outputLayer[i].CalculateDerivative();
            }

            return result;
        }

        /// <summary>
        /// Evaluates the network for inputs keyed by flashcard id.
        /// </summary>
        /// <returns>One activation value per output neuron.</returns>
        public double[] GetResult(Dictionary<int, double> input)
        {
            LoadInputs(input);
            return ComputeOutputs();
        }

        /// <summary>
        /// Evaluates the network for positional input values (input[i] goes to the
        /// i-th input neuron).
        /// </summary>
        /// <returns>One activation value per output neuron.</returns>
        public double[] GetResult(double[] input)
        {
            LoadInputs(input);
            return ComputeOutputs();
        }

        /// <summary>
        /// Performs one online training step for a positional input vector.
        /// </summary>
        /// <returns>The network's output before the weight update.</returns>
        public double[] Train(double[] input, double[] expectedResult)
        {
            LoadInputs(input);
            var result = ComputeOutputsWithErrors(expectedResult);
            BackPropagate();
            return result;
        }

        /// <summary>
        /// Performs one online training step for inputs keyed by flashcard id.
        /// </summary>
        /// <returns>The network's output before the weight update.</returns>
        public double[] Train(Dictionary<int, double> input, double[] expectedResult)
        {
            LoadInputs(input);
            var result = ComputeOutputsWithErrors(expectedResult);
            BackPropagate();
            return result;
        }

        /// <summary>
        /// Standard backpropagation: propagates errors from the output layer down
        /// through the hidden layers, then updates every weight using the learning
        /// rate and a momentum term.
        /// </summary>
        void BackPropagate()
        {
            // Error propagation, from the last hidden layer down to the first.
            for (int hiddenLayerIndex = LayersNumber - 1; hiddenLayerIndex >= 1; hiddenLayerIndex--)
            {
                for (int j = 0; j < NeuralLayers[hiddenLayerIndex].Count; j++)
                {
                    double total = 0;
                    for (int i = 0; i < NeuralLayers[hiddenLayerIndex + 1].Count; i++)
                    {
                        var upperNeuron = NeuralLayers[hiddenLayerIndex + 1][i];
                        // NOTE(review): for a BiasNeuron the first weight is used
                        // regardless of j — presumably BiasNeuron carries a single
                        // weight; confirm against the Neuron/BiasNeuron implementation.
                        double weight = upperNeuron is BiasNeuron ? upperNeuron.Weights[0] : upperNeuron.Weights[j];
                        total += NeuralLayers[hiddenLayerIndex][j].CalculateDerivative() * upperNeuron.Error * weight;
                    }
                    NeuralLayers[hiddenLayerIndex][j].Error = total;
                }
            }

            // Weight correction with momentum.
            for (int layerIndex = 1; layerIndex <= LayersNumber; layerIndex++)
            {
                for (int j = 0; j < NeuralLayers[layerIndex].Count; j++)
                {
                    var neuron = NeuralLayers[layerIndex][j];
                    for (int i = 0; i < neuron.Weights.Count; i++)
                    {
                        // BUG FIX: capture the previous delta BEFORE overwriting
                        // PrevWeights. The original assigned PrevWeights[i] = Weights[i]
                        // first, which made the momentum term
                        // (Weights[i] - PrevWeights[i]) identically zero.
                        double previousDelta = neuron.Weights[i] - neuron.PrevWeights[i];
                        neuron.PrevWeights[i] = neuron.Weights[i];
                        neuron.Weights[i] = neuron.Weights[i]
                            + LearningRatio * neuron.Error * NeuralLayers[layerIndex - 1][i].CalculateActivationFunction()
                            + MomentumRatio * previousDelta;
                    }
                }
            }
        }

        /// <summary>
        /// Sum of squared errors between a target pattern and the cached outputs of
        /// the most recent forward pass.
        /// </summary>
        double CalculatePatternError(double[] pattern)
        {
            double partial = 0;
            for (int i = 0; i < OutputsNumber; i++)
            {
                partial += Math.Pow(pattern[i] - NeuralLayers[LayersNumber][i].Output, 2.0);
            }
            return partial;
        }

        /// <summary>
        /// Root-mean-square error over an epoch, given the accumulated per-pattern
        /// squared-error sum and the number of patterns.
        /// </summary>
        double CalculateError(double patternsSum, int patternsCount)
        {
            return Math.Sqrt(patternsSum / (double)(patternsCount * OutputsNumber));
        }

        /// <summary>
        /// Trains on every (input, pattern) pair repeatedly until the epoch RMS error
        /// drops to <paramref name="maxError"/> or <paramref name="maxIteration"/>
        /// epochs have run.
        /// </summary>
        /// <returns>
        /// The index of the last epoch completed without converging (0 when the very
        /// first epoch converges) — preserved from the original implementation.
        /// </returns>
        public int TrainWholeSet(List<double[]> inputSets, List<double[]> patternSets, int maxIteration, double maxError)
        {
            int lastIteration = 0;
            for (int i = 0; i < maxIteration; i++)
            {
                double patternErrors = 0;
                for (int j = 0; j < inputSets.Count; j++)
                {
                    Train(inputSets[j], patternSets[j]);
                    patternErrors += CalculatePatternError(patternSets[j]);
                }

                var error = CalculateError(patternErrors, patternSets.Count);

                if (error <= maxError)
                    break;

                lastIteration = i;
            }
            return lastIteration;
        }

        /// <summary>
        /// Convenience overload: trains with the patterns visited in their given order.
        /// </summary>
        public int TrainWholeSet(List<Dictionary<int, double>> inputSets, List<double[]> patternSets, int maxIteration, double maxError)
        {
            return TrainWholeSet(inputSets, patternSets, maxIteration, maxError, false);
        }

        /// <summary>
        /// Trains on every keyed (input, pattern) pair per epoch, optionally visiting
        /// the pairs in a random order, until the epoch RMS error drops to
        /// <paramref name="maxError"/> or <paramref name="maxIteration"/> epochs run.
        /// </summary>
        /// <returns>
        /// The index of the last epoch completed without converging — preserved from
        /// the original implementation.
        /// </returns>
        public int TrainWholeSet(List<Dictionary<int, double>> inputSets, List<double[]> patternSets, int maxIteration, double maxError, bool randomOrder)
        {
            int lastIteration = 0;
            var rand = new Random();
            for (int i = 0; i < maxIteration; i++)
            {
                double patternErrors = 0;

                if (!randomOrder)
                {
                    for (int j = 0; j < inputSets.Count; j++)
                    {
                        Train(inputSets[j], patternSets[j]);
                        patternErrors += CalculatePatternError(patternSets[j]);
                    }
                }
                else
                {
                    // Visit each pattern exactly once per epoch, in random order.
                    // BUG FIX: Fisher–Yates shuffle replaces the original rejection
                    // sampling, whose expected running time was unbounded for larger sets.
                    var order = Enumerable.Range(0, inputSets.Count).ToList();
                    for (int j = order.Count - 1; j > 0; j--)
                    {
                        int swapIndex = rand.Next(j + 1);
                        int temp = order[j];
                        order[j] = order[swapIndex];
                        order[swapIndex] = temp;
                    }

                    foreach (var index in order)
                    {
                        Train(inputSets[index], patternSets[index]);
                        patternErrors += CalculatePatternError(patternSets[index]);
                    }
                }

                var error = CalculateError(patternErrors, patternSets.Count);

                if (error <= maxError)
                    break;

                lastIteration = i;
            }
            return lastIteration;
        }

    }
}
