/**********************************************************************
*  Copyright (c) 2007 - Victor Jacobs - victor.jacobs@gmail.com
*
*  Permission is hereby granted, free of charge, to any person
*  obtaining a copy of this software and associated documentation
*  files (the "Software"), to deal in the Software without
*  restriction, including without limitation the rights to use,
*  copy, modify, merge, publish, distribute, sublicense, and/or sell
*  copies of the Software, and to permit persons to whom the
*  Software is furnished to do so, subject to the following
*  conditions:
*
*  The above copyright notice and this permission notice shall be
*  included in all copies or substantial portions of the Software.
*
*  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
*  EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
*  OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
*  NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
*  HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
*  WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
*  FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
*  OTHER DEALINGS IN THE SOFTWARE.
**********************************************************************/

using System;
using System.Collections.Generic;
using System.Text;

namespace Vj.Ann
{
    /// <summary>
    /// A fully connected feed-forward neural network trained with the
    /// backpropagation algorithm. Layers are ordered input → hidden(s) → output;
    /// every non-output layer additionally carries one bias neuron whose
    /// output is fixed at 1.0.
    /// </summary>
    public class BaseNetwork
    {
        #region Data Fields

        // Parameters of the most recently created network, remembered so that
        // Reset() can rebuild a structurally identical network with freshly
        // randomised weights.
        // NOTE(review): these are static, so they are shared by ALL networks —
        // Reset() always rebuilds whatever was last passed to Create().
        private static int [] neuronsPerLayer;
        private static double minWeight;
        private static double maxWeight;
        private static double learnRate;

        // One shared generator for weight initialisation. Creating a new
        // Random per Create() call could reseed from the same clock tick and
        // hand successive networks identical starting weights.
        private static readonly Random generator = new Random();

        #endregion 

        #region Properties

        private double learningRate;

        /// <summary>Step size applied to the backpropagation weight updates.</summary>
        public double LearningRate
        {
            get { return learningRate; }
            set { learningRate = value; }
        }

        private List<BaseLayer> layers;

        /// <summary>All layers, ordered input first, output last.</summary>
        public List<BaseLayer> Layers
        {
            get { return layers; }
        }

        /// <summary>The first layer (index 0).</summary>
        public BaseLayer InputLayer
        {
            get { return layers[0]; }
        }

        /// <summary>
        /// The layers between input and output, or null when the network has
        /// fewer than three layers. (Kept returning null, not an empty list,
        /// for backward compatibility with existing callers.)
        /// </summary>
        public List<BaseLayer> HiddenLayers
        {
            get
            {
                if (layers.Count < 3)
                    return null;

                return layers.GetRange(1, layers.Count - 2);
            }
        }

        /// <summary>The last layer.</summary>
        public BaseLayer OutputLayer
        {
            get { return layers[layers.Count - 1]; }
        }

        #endregion

        #region Constructor

        /// <summary>
        /// Builds the layer skeleton: one input layer, the requested number of
        /// hidden layers, and one output layer. Neurons and connections are
        /// added separately (see Create).
        /// </summary>
        /// <param name="numberOfHiddenLayers">How many hidden layers to create.</param>
        public BaseNetwork(int numberOfHiddenLayers)
        {
            layers = new List<BaseLayer>();

            layers.Add(new BaseLayer(LayerType.Input));

            for (int i = 0; i < numberOfHiddenLayers; i++)
                layers.Add(new BaseLayer(LayerType.Hidden));

            layers.Add(new BaseLayer(LayerType.Output));
        }

        #endregion

        #region Network Creation

        /// <summary>
        /// Factory: builds a fully connected network with the given neuron
        /// counts per layer and uniformly random initial weights in
        /// [minWeight, maxWeight).
        /// </summary>
        /// <param name="neurons">Neuron count for each layer; must have length >= 3.</param>
        /// <param name="learningRate">Backpropagation step size.</param>
        /// <param name="minWeight">Lower bound (inclusive) for initial weights.</param>
        /// <param name="maxWeight">Upper bound (exclusive) for initial weights.</param>
        /// <returns>The newly created network.</returns>
        /// <exception cref="ApplicationException">
        /// Thrown when fewer than three layers are requested. (ApplicationException
        /// is kept, rather than the more idiomatic ArgumentException, so existing
        /// catch blocks keep working.)
        /// </exception>
        public static BaseNetwork Create(int[] neurons, double learningRate, double minWeight, double maxWeight)
        {
            // Validate BEFORE touching the static reset state, so a failed call
            // cannot corrupt the parameters Reset() depends on.
            if (neurons.Length < 3)
                throw new ApplicationException("The number of layers on a backpropagation net must be at least 3 (input, hidden, output)");

            // Remember the parameters for Reset().
            BaseNetwork.neuronsPerLayer = neurons;
            BaseNetwork.learnRate = learningRate;
            BaseNetwork.minWeight = minWeight;
            BaseNetwork.maxWeight = maxWeight;

            BaseNetwork newNet = new BaseNetwork(neurons.Length - 2);
            newNet.LearningRate = learningRate;

            CreateNeurons(neurons, newNet);
            CreateConnections(newNet, minWeight, maxWeight);

            return newNet;
        }

        /// <summary>
        /// Populates each layer with its neurons, plus one bias neuron
        /// (constant output 1.0) on every layer except the output layer.
        /// </summary>
        private static void CreateNeurons(int[] neurons, BaseNetwork newNet)
        {
            BaseNeuron neuron;

            for (int i = 0; i < neurons.Length; i++)
            {
                // The regular neurons of this layer.
                for (int j = 0; j < neurons[i]; j++)
                {
                    neuron = new BaseNeuron(NeuronType.Normal);
                    newNet.Layers[i].Neurons.Add(neuron);
                }

                // Every non-output layer gets a bias neuron clamped to 1.0.
                if (i != neurons.Length - 1)
                {
                    neuron = new BaseNeuron(NeuronType.Bias);

                    neuron.Input = 1.0;
                    neuron.Output = 1.0;
                    newNet.Layers[i].Neurons.Add(neuron);
                }
            }
        }

        /// <summary>
        /// Fully connects each layer to the next with uniformly random weights
        /// in [minWeight, maxWeight). Bias neurons receive no incoming
        /// connections (their output is constant). Each connection is
        /// registered on both its start and end neuron.
        /// </summary>
        private static void CreateConnections(BaseNetwork newNet, double minWeight, double maxWeight)
        {
            BaseConnection connection;
            double range = maxWeight - minWeight;

            for (int i = 0; i < newNet.Layers.Count - 1; i++)
            {
                for (int j = 0; j < newNet.Layers[i].Neurons.Count; j++)
                {
                    for (int z = 0; z < newNet.Layers[i + 1].Neurons.Count; z++)
                    {
                        // Bias neurons have a fixed output, so nothing feeds them.
                        if (newNet.Layers[i + 1].Neurons[z].Type == NeuronType.Bias)
                            continue;

                        connection = new BaseConnection(newNet.Layers[i].Neurons[j], newNet.Layers[i + 1].Neurons[z]);
                        // Uniform sample in [minWeight, maxWeight).
                        connection.Weight = minWeight + generator.NextDouble() * range;

                        newNet.Layers[i].Neurons[j].Connections.Add(connection);
                        newNet.Layers[i + 1].Neurons[z].Connections.Add(connection);
                    }
                }
            }
        }

        /// <summary>
        /// Rebuilds a structurally identical network (same topology, learning
        /// rate and weight range as the last Create() call) with new random
        /// weights.
        /// </summary>
        /// <exception cref="InvalidOperationException">
        /// Thrown when Create() has never been called, so there is nothing to reset to.
        /// </exception>
        public static BaseNetwork Reset()
        {
            if (neuronsPerLayer == null)
                throw new InvalidOperationException("Reset() requires a prior successful call to Create().");

            return Create(neuronsPerLayer, learnRate, minWeight, maxWeight);
        }

        #endregion

        #region Train

        /// <summary>Runs one training pass (propagate + backpropagate + update) over each pattern, in order.</summary>
        public void Train(List<Pattern> patterns)
        {
            for (int i = 0; i < patterns.Count; i++)
                Train(patterns[i]);
        }

        /// <summary>
        /// Runs a forward pass only; results are read from the output layer's
        /// neuron outputs. No weights are changed.
        /// </summary>
        public void Test(Pattern pattern)
        {
            Propagate(pattern);
        }

        /// <summary>Trains the network on a single pattern: forward pass, error backpropagation, weight update.</summary>
        public void Train(Pattern pattern)
        {
            Propagate(pattern);
            Backpropagate();
            UpdateWeights();
        }

        #endregion

        #region Propogate

        /// <summary>
        /// Forward pass: loads the pattern into the input layer, stores the
        /// expected values on the output layer, then propagates activations
        /// layer by layer through the sigmoid transfer function.
        /// </summary>
        private void Propagate(Pattern pattern)
        {
            // NOTE(review): SigmoidFunction(-1.0, 1.0, 0.1) is rebuilt per pass;
            // it is declared elsewhere — hoist to a field if it is stateless.
            SigmoidFunction sigFunc = new SigmoidFunction(-1.0, 1.0, 0.1);

            double accumulator;

            // Set the pattern in the input units.
            for (int i = 0; i < pattern.Inputs.Length; i++)
                InputLayer.Neurons[i].Output = pattern.Inputs[i];

            // Set the expected values in the output units.
            for (int i = 0; i < pattern.Outputs.Length; i++)
                OutputLayer.Neurons[i].ExpectedValue = pattern.Outputs[i];

            // Propagate through every layer after the input layer, in order.
            for (int l = 1; l < layers.Count; l++)
                foreach (BaseNeuron neuron in layers[l].Neurons)
                {
                    // Bias neurons keep their constant output.
                    if (neuron.Type == NeuronType.Bias)
                        continue;

                    // Weighted sum of the outputs feeding this neuron.
                    accumulator = 0.0;
                    foreach (BaseConnection connection in neuron.Connections)
                        if (connection.EndNode == neuron)
                            accumulator += connection.Weight * connection.StartNode.Output;

                    // Store the net input and squash it through the sigmoid.
                    neuron.Input = accumulator;
                    neuron.Output = sigFunc.Sigmoid(accumulator);
                }
        }

        /// <summary>
        /// Backward pass: computes the output-layer error from the expected
        /// values, then walks the layers from back to front, propagating the
        /// error and recording each connection's weight-update term
        /// (learningRate * downstream error * upstream output). Weights are not
        /// modified here — see UpdateWeights.
        /// </summary>
        private void Backpropagate()
        {
            double accumulator;

            SigmoidFunction sigFunc = new SigmoidFunction(-1.0, 1.0, 0.1);

            // Error at the output units: (expected - actual) * f'(input).
            foreach (BaseNeuron neuron in OutputLayer.Neurons)
                neuron.ErrorOutput = (neuron.ExpectedValue - neuron.Output) * sigFunc.FirstDerivativeSigmoid(neuron.Input);

            // Walk every layer except the output one, from back to front,
            // so each neuron's downstream errors are already available.
            for (int l = layers.Count - 2; l >= 0; l--)
                foreach (BaseNeuron neuron in layers[l].Neurons)
                {
                    accumulator = 0.0;
                    foreach (BaseConnection connection in neuron.Connections)
                        if (connection.StartNode == neuron)
                        {
                            accumulator += connection.Weight * connection.EndNode.ErrorOutput;
                            connection.WeightUpdate = learningRate * connection.EndNode.ErrorOutput * neuron.Output;
                        }

                    // Propagate the accumulated error back through the sigmoid derivative.
                    neuron.ErrorInput = accumulator;
                    neuron.ErrorOutput = accumulator * sigFunc.FirstDerivativeSigmoid(neuron.Input);
                }
        }

        /// <summary>
        /// Applies the weight-update terms computed by Backpropagate. Each
        /// connection is visited once, via its start neuron.
        /// </summary>
        private void UpdateWeights()
        {
            foreach (BaseLayer layer in layers)
                foreach (BaseNeuron neuron in layer.Neurons)
                    foreach (BaseConnection connection in neuron.Connections)
                        if (connection.StartNode == neuron)
                            connection.Weight += connection.WeightUpdate;
        }

        #endregion

        #region File In/Out

        /// <summary>Not implemented: persistence stub, always returns false.</summary>
        public bool Save(string filename)
        { return false; }

        /// <summary>Not implemented: load stub, always returns false.</summary>
        public bool Read(string filename)
        { return false; }

        #endregion

    }
}
