﻿
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using DotNetNeural.Activation;
using DotNetNeural.Data.Utils;
using DotNetNeural.Perceptron;
using DotNetNeural.Perceptron.Initialization;

namespace DotNetNeural.BackPropagation
{
    /// <summary>
    /// A fully-connected feed-forward multilayer perceptron. Each layer is stored as a
    /// weight matrix <c>float[inputs, outputs]</c>; propagation multiplies the input
    /// vector through every layer, applying <see cref="ActivationFunc"/> after each one.
    /// </summary>
    public class MultilayerPreceptron : IPerceptron, IAdjustablePerceptron
    {
        /// <summary>Settings used when no explicit activation function is supplied.</summary>
        public static readonly DefaultPerceptronSettings DefaultSettings = new DefaultPerceptronSettings();

        /// <summary>Creates a minimal 1-input, 1-output network with no hidden layers.</summary>
        public MultilayerPreceptron()
            : this(1, 1)
        {
        }

        /// <summary>Creates a network with no hidden layers and default settings.</summary>
        public MultilayerPreceptron(int inputsCount, int outputsCount)
            : this(inputsCount, outputsCount, 0)
        {
        }

        /// <summary>Creates a network with the given number of hidden layers and default settings.</summary>
        public MultilayerPreceptron(int inputsCount, int outputsCount, int hiddenLayersCount)
            : this(inputsCount, outputsCount, hiddenLayersCount, DefaultSettings)
        {
        }

        /// <summary>Creates a network with no hidden layers and a custom activation function.</summary>
        public MultilayerPreceptron(int inputsCount, int outputsCount, IActivationFunc aFunc)
            : this(inputsCount, outputsCount, 0, aFunc)
        {
        }

        /// <summary>
        /// Creates a network with the given topology and activation function.
        /// Hidden layers are initially sized to <paramref name="inputsCount"/> neurons each.
        /// </summary>
        /// <param name="inputsCount">Number of network inputs; must be positive.</param>
        /// <param name="outputsCount">Number of network outputs; must be positive.</param>
        /// <param name="hiddenLayersCount">Number of hidden layers; must be non-negative.</param>
        /// <param name="aFunc">Activation function applied after every layer.</param>
        /// <exception cref="ArgumentException">Thrown when a count argument is out of range.</exception>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="aFunc"/> is null.</exception>
        public MultilayerPreceptron(int inputsCount, int outputsCount, int hiddenLayersCount, IActivationFunc aFunc)
        {
            if (inputsCount <= 0 || outputsCount <= 0 || hiddenLayersCount < 0)
                throw new ArgumentException("Illegal negative inputs/outputs count or hiddenLayersCount");

            if (aFunc == null)
                throw new ArgumentNullException(nameof(aFunc), "Illegal null-reference activation function");

            ActivationFunc = aFunc;
            InputsCount = inputsCount;
            OutputsCount = outputsCount;
            layers = new List<float[,]>(hiddenLayersCount + 1);

            // First call creates the output layer; each subsequent call inserts a hidden
            // layer sized to inputsCount, giving hiddenLayersCount + 1 weight matrices.
            for (int i = 0; i < hiddenLayersCount + 1; ++i)
                AddLayer(inputsCount);
        }

        #region IPerceptron Members

        /// <summary>Total number of weight layers (hidden layers + output layer).</summary>
        public int LayersCount
        {
            get
            {
                return layers.Count;
            }
        }

        /// <summary>Number of network inputs.</summary>
        public int InputsCount { get; private set; }

        /// <summary>Number of network outputs.</summary>
        public int OutputsCount { get; private set; }

        /// <summary>Activation function applied after every layer.</summary>
        public IActivationFunc ActivationFunc { get; private set; }

        /// <summary>
        /// Replaces the current weights with those deserialized from <paramref name="s"/>,
        /// and derives <see cref="InputsCount"/>/<see cref="OutputsCount"/> from the
        /// first and last matrices.
        /// </summary>
        /// <exception cref="InvalidDataException">Thrown when the stream yields no layers.</exception>
        public void LoadWeights(Stream s)
        {
            PerceptronSerializer serializer = new PerceptronSerializer();
            layers = serializer.LoadFromStream(s);

            var firstLayer = layers.FirstOrDefault();
            var lastLayer = layers.LastOrDefault();

            if (firstLayer == null || lastLayer == null)
                throw new InvalidDataException("Can't deserialize perceptron weights");

            InputsCount = firstLayer.GetLength(0);
            OutputsCount = lastLayer.GetLength(1);
        }

        /// <summary>Serializes all layer weights into <paramref name="s"/>.</summary>
        public void SaveWeights(Stream s)
        {
            PerceptronSerializer serializer = new PerceptronSerializer();
            serializer.SaveToStream(s, layers);
        }

        /// <summary>Fills every weight in every layer with a value produced by <paramref name="wFunc"/>.</summary>
        public void InitWeights(IWeightsInitializer wFunc)
        {
            foreach (var l in layers)
            {
                int rowsCount = l.GetLength(0);
                int colsCount = l.GetLength(1);

                for (int i = 0; i < rowsCount; ++i)
                {
                    for (int j = 0; j < colsCount; ++j)
                    {
                        l[i, j] = wFunc.GenerateValue();
                    }
                }
            }
        }

        /// <summary>
        /// Compares this network's weights layer-by-layer against another perceptron.
        /// </summary>
        /// <param name="perceptron">Network to compare against; null yields false.</param>
        /// <param name="precision">Maximum allowed per-weight difference.</param>
        /// <returns>True when every corresponding weight differs by at most <paramref name="precision"/>.</returns>
        public bool CompareWeights(IPerceptron perceptron, float precision)
        {
            if (perceptron == null)
                return false;

            if (LayersCount != perceptron.LayersCount)
                return false;

            for (int i = 0; i < LayersCount; ++i)
            {
                var sourceLayer = layers[i];
                var targetLayer = perceptron[i];

                // BUG FIX: previously compared with a hard-coded 0.001f,
                // silently ignoring the caller-supplied precision.
                if (!sourceLayer.Compare(targetLayer, precision))
                    return false;
            }

            return true;
        }

        /// <summary>
        /// Appends a hidden layer of <paramref name="neuronsCount"/> neurons just before
        /// the output layer (or creates the output layer itself when the network is empty),
        /// reallocating the affected weight matrices. New weights are zero-initialized.
        /// </summary>
        /// <exception cref="ArgumentException">Thrown when <paramref name="neuronsCount"/> is not positive.</exception>
        public void AddLayer(int neuronsCount)
        {
            if (neuronsCount <= 0)
                throw new ArgumentException("Illegal non-positive number of neurons in layer");

            if (LayersCount == 0)
            {
                layers.Add(new float[InputsCount, OutputsCount]);
            }
            else
            {
                //network does have only one layer -- output one. it acts as the only hidden layer
                if (layers.Count == 1)
                {
                    layers.Clear();
                    layers.Add(new float[InputsCount, neuronsCount]);
                    layers.Add(new float[neuronsCount, OutputsCount]);
                }
                else
                {
                    // Resize the old output layer to feed the new hidden layer,
                    // then append a fresh output layer.
                    var lastLayer = layers.LastOrDefault();
                    int lastLayerInputsCount = lastLayer.GetLength(0);

                    int lastLayerIndex = layers.Count - 1;
                    layers[lastLayerIndex] = new float[lastLayerInputsCount, neuronsCount];
                    layers.Add(new float[neuronsCount, OutputsCount]);
                }
            }
        }

        /// <summary>
        /// Feeds <paramref name="input"/> forward through all layers and returns the
        /// network's output vector. Applies the activation function after every layer.
        /// </summary>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="input"/> is null.</exception>
        /// <exception cref="ArgumentException">Thrown when the input length differs from <see cref="InputsCount"/>.</exception>
        public float[] Propagate(float[] input)
        {
            ValidateInputBeforePropagation(input);

            int layerInputsCount = -1;
            int layerOutputsCount = -1;
            float[] currentLayerOutput = null;
            float[] previousLayerOutput = input;

            Func<float, float> aFunc = ActivationFunc.Activation;

            for (int k = 0; k < layers.Count; ++k)
            {
                var currentLayer = layers[k];

                layerInputsCount = currentLayer.GetLength(0);
                layerOutputsCount = currentLayer.GetLength(1);

                currentLayerOutput = new float[layerOutputsCount];

                for (int i = 0; i < layerOutputsCount; ++i)
                {
                    // Weighted sum over the previous layer's outputs, then activation.
                    for (int j = 0; j < layerInputsCount; ++j)
                    {
                        currentLayerOutput[i] += previousLayerOutput[j] * currentLayer[j, i];
                    }

                    currentLayerOutput[i] = aFunc(currentLayerOutput[i]);
                }

                previousLayerOutput = currentLayerOutput;
            }

            return currentLayerOutput;
        }

        /// <summary>
        /// Feeds <paramref name="input"/> forward while recording the raw (pre-activation)
        /// and activated outputs of every layer into <paramref name="context"/> — used by
        /// back-propagation training. Falls back to plain propagation when the context is null.
        /// </summary>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="input"/> is null.</exception>
        /// <exception cref="ArgumentException">Thrown when the input length differs from <see cref="InputsCount"/>.</exception>
        public float[] Propagate(float[] input, IPropagationContext context)
        {
            if (context == null)
                return Propagate(input);

            ValidateInputBeforePropagation(input);

            float[][] layerOutputs = new float[LayersCount][];
            float[][] layerRawOutputs = new float[LayersCount][];

            int layerInputsCount = -1;
            int layerOutputsCount = -1;

            Func<float, float> aFunc = ActivationFunc.Activation;

            for (int k = 0; k < layers.Count; ++k)
            {
                var currentLayer = layers[k];

                layerInputsCount = currentLayer.GetLength(0);
                layerOutputsCount = currentLayer.GetLength(1);

                layerRawOutputs[k] = new float[layerOutputsCount];
                layerOutputs[k] = new float[layerOutputsCount];

                for (int i = 0; i < layerOutputsCount; ++i)
                {
                    for (int j = 0; j < layerInputsCount; ++j)
                    {
                        // The first layer reads from the raw input; later layers read the
                        // activated output of the previous layer.
                        if (k == 0)
                        {
                            layerRawOutputs[k][i] += input[j] * currentLayer[j, i];
                        }
                        else
                        {
                            layerRawOutputs[k][i] += layerOutputs[k-1][j] * currentLayer[j, i];
                        }
                    }

                    layerOutputs[k][i] = aFunc(layerRawOutputs[k][i]);
                }
            }

            context.ActivationFunction = ActivationFunc;
            context.LayersOutputs = layerOutputs;
            context.LayerRawOutputs = layerRawOutputs;

            return context.LayersOutputs[LayersCount - 1];
        }

        /// <summary>Read-only access to the weight matrix of layer <paramref name="index"/>.</summary>
        public float[,] this[int index]
        {
            get
            {
                return layers[index];
            }
        }

        #endregion

        #region IAdjustablePerceptron Members

        /// <summary>Not implemented yet.</summary>
        public void AddNeuron(int layerIndex, int position)
        {
            throw new NotImplementedException();
        }

        /// <summary>Not implemented yet.</summary>
        public void RemoveNeuron(int layerIndex, int position)
        {
            throw new NotImplementedException();
        }

        #endregion

        #region Private methods

        // Guards every Propagate overload: the input must be a non-null vector
        // whose length matches the network's input count.
        private void ValidateInputBeforePropagation(float[] input)
        {
            if (input == null)
                throw new ArgumentNullException(nameof(input), "Illegal null-reference input");

            if (input.Length != InputsCount)
                throw new ArgumentException("Illegal dimension of input vector. Doesn't match network inputs count");
        }

        #endregion

        #region Private fields

        // One weight matrix per layer: [layer inputs, layer outputs].
        private IList<float[,]> layers;

        #endregion
    }
}
