﻿using System;
using DotNetNeural.BackPropagation;

namespace DotNetNeural.Perceptron.Algorithm
{
    /// <summary>
    /// Trains an <see cref="IPerceptron"/> with the classic back-propagation
    /// (gradient-descent) algorithm: one forward pass to collect per-layer
    /// outputs, then a backward pass that adjusts every weight in place.
    /// </summary>
    public class BackPropagationImpl
    {
        /// <summary>Learning rate used when none is supplied to the constructor.</summary>
        public const float DefaultLearningSpeed = 0.1f;

        /// <summary>Gradient-descent step size applied to every weight update.</summary>
        public float LearningSpeed { get; private set; }

        /// <summary>Creates a trainer using <see cref="DefaultLearningSpeed"/>.</summary>
        public BackPropagationImpl()
            : this(DefaultLearningSpeed) { }

        /// <summary>Creates a trainer with the given learning rate.</summary>
        /// <param name="learningSpeed">Gradient-descent step size.</param>
        public BackPropagationImpl(float learningSpeed)
        {
            LearningSpeed = learningSpeed;
        }

        /// <summary>
        /// Runs one forward pass on <paramref name="input"/>, then back-propagates
        /// the error against <paramref name="expectedOutput"/>, mutating the
        /// perceptron's weights in place.
        /// </summary>
        /// <param name="perceptron">Network to train; its weight matrices are modified.</param>
        /// <param name="input">Input vector; length must equal <c>perceptron.InputsCount</c>.</param>
        /// <param name="expectedOutput">Target vector; length must equal <c>perceptron.OutputsCount</c>.</param>
        /// <exception cref="ArgumentNullException">Any argument is null.</exception>
        /// <exception cref="ArgumentException">Vector lengths do not match the perceptron's dimensions.</exception>
        public void BackPropagate(IPerceptron perceptron, float[] input, float[] expectedOutput)
        {
            // Guard clauses: throw ArgumentNullException (not NullReferenceException,
            // which must never be thrown explicitly — CA2201).
            if (perceptron == null)
                throw new ArgumentNullException(nameof(perceptron));
            if (input == null)
                throw new ArgumentNullException(nameof(input));
            if (expectedOutput == null)
                throw new ArgumentNullException(nameof(expectedOutput));

            if (perceptron.OutputsCount != expectedOutput.Length)
                throw new ArgumentException("Illegal dimensions of expected output vector and perceptron");

            if (perceptron.InputsCount != input.Length)
                throw new ArgumentException("Illegal dimensions of input vector and perceptron");

            // Forward pass: the context records each layer's raw (pre-activation)
            // and activated outputs for use in the backward pass below.
            IPropagationContext context = new PropagationContext();
            perceptron.Propagate(input, context);

            Func<float, float> dF = context.ActivationFunction.DerivativeActivation;
            int lastLayer = context.LayersCount - 1;
            float[] sigmas = null; // error terms of the layer above the current one

            // Backward pass, from the output layer down to the first layer.
            for (int k = lastLayer; k >= 0; --k)
            {
                var currentLayer = perceptron[k];
                var curRawOut = context.LayerRawOutputs[k];
                int inCnt = currentLayer.GetLength(0);
                int outCnt = currentLayer.GetLength(1);

                // Values that fed layer k: the previous layer's activated outputs,
                // or the network input for the very first layer.
                float[] prevOut = k > 0 ? context.LayersOutputs[k - 1] : input;

                if (k == lastLayer)
                {
                    // Output layer (also the single-layer case when lastLayer == 0):
                    // sigma_j = (actual_j - expected_j) * f'(raw_j).
                    sigmas = ComputeOutputSigmas(dF, context.LayersOutputs[k], curRawOut, expectedOutput, outCnt);
                    ChangeWeights(currentLayer, sigmas, prevOut, inCnt, outCnt);
                }
                else
                {
                    // Hidden (or first) layer: sigmas come from the layer above.
                    sigmas = BackPropagateWithSigmas(perceptron, dF, k, sigmas, prevOut, currentLayer, curRawOut, inCnt, outCnt);
                }
            }
        }

        #region Private methods

        /// <summary>
        /// Computes the output-layer error terms:
        /// sigma_j = (actual_j - expected_j) * f'(raw_j).
        /// </summary>
        private static float[] ComputeOutputSigmas(Func<float, float> dF, float[] actualOut,
            float[] rawOut, float[] expectedOut, int outCnt)
        {
            var sigmas = new float[outCnt];

            for (int j = 0; j < outCnt; ++j)
            {
                sigmas[j] = (actualOut[j] - expectedOut[j]) * dF(rawOut[j]);
            }

            return sigmas;
        }

        /// <summary>
        /// Computes the error terms for hidden layer <paramref name="k"/> by
        /// weighting the next layer's sigmas through the next layer's weight
        /// matrix, then updates layer k's weights.
        /// </summary>
        /// <param name="sigmas">Error terms of layer k + 1.</param>
        /// <returns>The error terms of layer <paramref name="k"/>.</returns>
        private float[] BackPropagateWithSigmas(IPerceptron perceptron, Func<float, float> dF, int k, float[] sigmas,
            float[] prevOut, float[,] currentLayer, float[] curRawOut, int inCnt, int outCnt)
        {
            // NOTE(review): layer k+1's weights were already updated by the
            // previous iteration; textbook back-propagation derives these sigmas
            // from the *pre-update* weights — confirm this ordering is intentional.
            var nextLayer = perceptron[k + 1];
            int nextLayerOutsCnt = nextLayer.GetLength(1);
            float[] cSigmas = new float[outCnt];

            // sigma_j = f'(raw_j) * sum_l( sigma_{l} * w_{j,l} of the next layer )
            for (int j = 0; j < outCnt; ++j)
            {
                for (int l = 0; l < nextLayerOutsCnt; ++l)
                {
                    cSigmas[j] += sigmas[l] * nextLayer[j, l];
                }

                cSigmas[j] *= dF(curRawOut[j]);
            }

            ChangeWeights(currentLayer, cSigmas, prevOut, inCnt, outCnt);
            return cSigmas;
        }

        /// <summary>
        /// Applies one gradient-descent step to a weight matrix:
        /// w[i, j] -= LearningSpeed * sigma_j * prevOut_i.
        /// </summary>
        private void ChangeWeights(float[,] weights, float[] sigmas, float[] prevOut, int inCnt, int outCnt)
        {
            for (int i = 0; i < inCnt; ++i)
            {
                for (int j = 0; j < outCnt; ++j)
                {
                    weights[i, j] += -LearningSpeed * sigmas[j] * prevOut[i];
                }
            }
        }

        #endregion
    }
}
