﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace NeuralNetwork
{
    /// <summary>Squashing function applied to a perceptron's net input (e.g. the logistic sigmoid).</summary>
    public delegate double ActivationFunction(double net);
    /// <summary>Derivative of the activation expressed in terms of the activation's output,
    /// used when computing back-propagation deltas (e.g. o * (1 - o) for the sigmoid).</summary>
    public delegate double ErrorFunction(double output);

    /// <summary>
    /// Trains a fully-connected feed-forward network with sigmoid activations using
    /// back-propagation with momentum. Training pairs are registered via
    /// <see cref="AddInputTargetPair"/> and the network is fitted by <see cref="Train"/>.
    /// </summary>
    public class NeuralNetworkTeacher
    {
        private readonly double _momentumConstant;
        private readonly double _learningRate;
        private readonly List<NeuralLayerBase> _layers;
        // _weights[l][j][i]: weight from perceptron i in layer l to perceptron j in layer l + 1.
        private double[][][] _weights;
        // Previous weight updates, same shape as _weights; feeds the momentum term.
        private double[][][] _diffs;
        private readonly List<double[]> _inputs = new List<double[]>();
        private readonly List<double[]> _targets = new List<double[]>();
        private readonly ActivationFunction _activationFunction;
        private readonly ErrorFunction _errorFunction;
        private readonly int _maxRun = 100;   // hard cap on training epochs
        private int _run;                     // epochs completed in the current Train() call

        /// <summary>
        /// Snapshot of the trained state (topology, weights, momentum buffers, hyper-parameters)
        /// suitable for persisting and later reloading via the model constructor.
        /// </summary>
        public NeuralNetworkModel Model
        {
            get
            {
                return new NeuralNetworkModel()
                {
                    momentumConstant = _momentumConstant,
                    learningRate = _learningRate,
                    diffs = _diffs,
                    weights = _weights,
                    layerSizes = _layers.Select(layer => layer.Size).ToArray()
                };
            }
        }

        /// <summary>Creates an untrained network with the given topology.</summary>
        /// <param name="inputSize">Number of input perceptrons.</param>
        /// <param name="outputSize">Number of output perceptrons.</param>
        /// <param name="hiddenLayerSizes">Sizes of the hidden layers, in order; null for none.</param>
        /// <param name="momentum">Momentum constant applied to the previous weight update.</param>
        /// <param name="learningRate">Gradient step size.</param>
        public NeuralNetworkTeacher(int inputSize, int outputSize, int[] hiddenLayerSizes = null,
            double momentum = 0.7, double learningRate = 0.9)
        {
            _momentumConstant = momentum;
            _learningRate = learningRate;
            _activationFunction = sigmoid;
            _errorFunction = sigmoidDerived;
            _layers = new List<NeuralLayerBase> { new InputLayer(inputSize) };

            if (hiddenLayerSizes != null)
            {
                foreach (var size in hiddenLayerSizes)
                {
                    _layers.Add(new HiddenLayer(size));
                }
            }

            _layers.Add(new OutputLayer(outputSize));
        }

        /// <summary>Reconstructs a trained network from a previously saved model.</summary>
        public NeuralNetworkTeacher(NeuralNetworkModel model)
        {
            _momentumConstant = model.momentumConstant;
            _learningRate = model.learningRate;
            _activationFunction = sigmoid;
            _errorFunction = sigmoidDerived;
            // First entry is the input layer, last is the output layer, the rest are hidden.
            _layers = new List<NeuralLayerBase> { new InputLayer(model.layerSizes[0]) };

            for (int i = 1; i < model.layerSizes.Length - 1; ++i)
            {
                _layers.Add(new HiddenLayer(model.layerSizes[i]));
            }

            _layers.Add(new OutputLayer(model.layerSizes.Last()));

            _weights = model.weights;
            _diffs = model.diffs;
            Complete = true;
        }

        /// <summary>Outputs of the output layer after the most recent forward pass.</summary>
        public double[] Outputs
        {
            get
            {
                var outputLayer = _layers.Last();
                var outputs = new double[outputLayer.Size];

                for (int i = 0; i < outputs.Length; ++i)
                {
                    outputs[i] = outputLayer.Perceptrons[i].Output;
                }

                return outputs;
            }
        }

        /// <summary>Half sum-of-squares error of the output layer for the last presented sample.</summary>
        public double Error
        {
            get { return calculateAvgError(); }
        }

        /// <summary>True once training has finished (or a trained model was loaded).</summary>
        public bool Complete { get; private set; }

        /// <summary>Registers one training sample.</summary>
        /// <param name="input">Input vector; length must equal the input layer size.</param>
        /// <param name="target">Target vector; length must equal the output layer size.</param>
        /// <exception cref="ArgumentException">Thrown when a vector length does not match its layer.</exception>
        public void AddInputTargetPair(double[] input, double[] target)
        {
            if (input.Length != _layers[0].Size)
            {
                throw new ArgumentException(
                    "Perceptrons in input layer and size of inputs don't match!");
            }

            if (target.Length != _layers.Last().Size)
            {
                throw new ArgumentException(
                    "Perceptrons in output layer and size of outputs don't match!");
            }

            _inputs.Add(input);
            _targets.Add(target);
        }

        // Allocates the weight/momentum tensors and initializes weights uniformly in [-0.5, 0.5).
        // The momentum buffers start at zero, which new double[] already guarantees.
        private void generateWeights()
        {
            var random = new Random();
            _weights = new double[_layers.Count - 1][][];
            _diffs = new double[_layers.Count - 1][][];

            for (int l = 0; l < _layers.Count - 1; ++l)
            {
                int fanOut = _layers[l + 1].Size;
                int fanIn = _layers[l].Size;
                _weights[l] = new double[fanOut][];
                _diffs[l] = new double[fanOut][];

                for (int j = 0; j < fanOut; ++j)
                {
                    _weights[l][j] = new double[fanIn];
                    _diffs[l][j] = new double[fanIn];

                    for (int i = 0; i < fanIn; ++i)
                    {
                        _weights[l][j][i] = random.NextDouble() - 0.5;
                    }
                }
            }
        }

        /// <summary>
        /// Runs on-line back-propagation over all registered samples until the error
        /// criterion is met or the maximum number of epochs is reached.
        /// </summary>
        public void Train()
        {
            generateWeights();
            _run = 0;          // reset so Train() can be called more than once
            Complete = false;

            do
            {
                Console.WriteLine("Run:" + _run);
                for (int i = 0; i < _inputs.Count; ++i)
                {
                    ForwardCalculate(_inputs[i]);
                    calculateErrors(_targets[i]);
                    updateWeights();
                }

            } while (!errorCriteriaMet);

            Complete = true;
        }

        // Half sum-of-squares error over the output layer for the last forward pass.
        private double calculateAvgError()
        {
            var squaredErrorSum = 0.0;

            foreach (OutputPerceptron p in _layers.Last().Perceptrons)
            {
                squaredErrorSum += p.Error * p.Error;   // squaring makes Math.Abs unnecessary
            }

            return squaredErrorSum / 2;
        }

        // Stops when the last sample's error drops below the threshold or the epoch cap is hit.
        // NOTE(review): only the most recently presented sample's error is checked, not an
        // average over the training set — confirm this is the intended stopping criterion.
        private bool errorCriteriaMet
        {
            get
            {
                var error = calculateAvgError();

                Console.WriteLine("Error:" + error);

                // ++_run only executes when the error criterion fails (short-circuit).
                return error < 0.02 || ++_run == _maxRun;
            }
        }

        /// <summary>Runs a forward pass for the given inputs without disturbing training data.</summary>
        public void SimulateWith(double[] inputs)
        {
            // NOTE(review): CopyTo is a shallow copy — the saved array holds the same layer
            // references, so perceptron state mutated by ForwardCalculate is NOT restored.
            // Confirm whether a deep snapshot was intended here.
            var savedLayers = new NeuralLayerBase[_layers.Count];
            _layers.CopyTo(savedLayers);
            ForwardCalculate(inputs);
            _layers.Clear();
            _layers.AddRange(savedLayers);
        }

        /// <summary>Propagates the given input vector through every layer of the network.</summary>
        public void ForwardCalculate(double[] inputs)
        {
            // Load the raw inputs into the input layer.
            for (int i = 0; i < inputs.Length; ++i)
            {
                var ip = (InputPerceptron)_layers[0].Perceptrons[i];
                ip.Input = inputs[i];
                ip.CalculateOutput(null, null);
            }

            // Each subsequent layer computes its net input from the previous layer,
            // then squashes it through the activation function.
            for (int i = 1; i < _layers.Count; ++i)
            {
                NeuralLayerBase currentLayer = _layers[i],
                    previousLayer = _layers[i - 1];

                for (int j = 0; j < currentLayer.Size; ++j)
                {
                    var perceptron = currentLayer.Perceptrons[j];
                    perceptron.CalculateOutput(_weights[i - 1][j], previousLayer);
                    perceptron.Output = _activationFunction(perceptron.Output);
                }
            }
        }

        // Logistic sigmoid: maps any net input into (0, 1).
        private double sigmoid(double net)
        {
            return 1.0 / (1.0 + Math.Exp(-net));
        }

        // Sigmoid derivative expressed in terms of the sigmoid's output.
        private double sigmoidDerived(double output)
        {
            return output * (1 - output);
        }

        // Stores (target - output) on each output perceptron for the current sample.
        private void calculateErrors(double[] targets)
        {
            var outputLayer = (OutputLayer)_layers.Last();

            for (int i = 0; i < outputLayer.Size; ++i)
            {
                var op = (OutputPerceptron)outputLayer.Perceptrons[i];
                op.Error = targets[i] - op.Output;
            }
        }

        // Delta rule for the output layer: delta_i = e_i * f'(o_i),
        // weight update w_ij += momentum * previous update + eta * delta_i * o_j(prev layer).
        private void updateOutputLayerWeights()
        {
            var outputIndex = _layers.Count - 1;
            var weightIndex = outputIndex - 1;
            NeuralLayerBase currentLayer = _layers[outputIndex],
                   previousLayer = _layers[weightIndex];

            for (int i = 0; i < currentLayer.Size; ++i)
            {
                var oper = (OutputPerceptron)currentLayer.Perceptrons[i];
                // delta depends only on the output perceptron, so compute it once per i.
                double delta = oper.Error * _errorFunction(oper.Output);
                oper.Delta = delta;

                for (int j = 0; j < previousLayer.Size; ++j)
                {
                    var pper = previousLayer.Perceptrons[j];

                    _diffs[weightIndex][i][j] = _momentumConstant * _diffs[weightIndex][i][j]
                        + _learningRate * delta * pper.Output;
                    _weights[weightIndex][i][j] += _diffs[weightIndex][i][j];
                }
            }
        }

        // Back-propagates deltas through the hidden layers (walking from the last hidden
        // layer toward the input) and applies the momentum weight update.
        private void updateHiddenLayerWeights()
        {
            for (int i = _layers.Count - 2; i >= 1; i--)
            {
                for (int j = 0; j < _layers[i].Size; ++j)
                {
                    var hper = (HiddenPerceptron)_layers[i].Perceptrons[j];

                    // Backpropagated error: downstream deltas weighted by the outgoing weights.
                    // Invariant w.r.t. the previous-layer index, so computed once per perceptron.
                    var deltaForward = 0.0;

                    for (int k = 0; k < _layers[i + 1].Size; ++k)
                    {
                        deltaForward += _layers[i + 1].Perceptrons[k].Delta *
                            _weights[i][k][j];
                    }

                    var delta = _errorFunction(hper.Output) * deltaForward;
                    hper.Delta = delta;

                    for (int l = 0; l < _layers[i - 1].Size; ++l)
                    {
                        var pper = _layers[i - 1].Perceptrons[l];

                        // FIX: the gradient for weight w[j][l] uses the presynaptic output
                        // pper.Output; the original multiplied by hper.Output and left pper unused.
                        _diffs[i - 1][j][l] = _momentumConstant * _diffs[i - 1][j][l]
                            + _learningRate * delta * pper.Output;
                        _weights[i - 1][j][l] += _diffs[i - 1][j][l];
                    }
                }
            }
        }

        // One backward pass: output layer first (computes output deltas), then hidden layers.
        private void updateWeights()
        {
            updateOutputLayerWeights();
            updateHiddenLayerWeights();
        }
    }
}
