/**
*  Copyright (c) 2011, Alex Theodoridis
*  All rights reserved.

*  Redistribution and use in source and binary forms, with
*  or without modification, are permitted provided that the
*  following conditions are met:
*  Redistributions of source code must retain the above
*  copyright notice, this list of conditions and the following disclaimer.
*  Redistributions in binary form must reproduce the above
*  copyright notice, this list of conditions and the following
*  disclaimer in the documentation and/or other materials
*  provided with the distribution.

*  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS
*  AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
*  INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
*  MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
*  IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
*  ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
*  OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
*  PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
*  OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
*  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
*  OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
*  ANY WAY OUT OF THE USE OF THIS SOFTWARE,
*  EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE
*/

#ifndef PerceptronH
#define PerceptronH

#include <NeuralNetwork/Serialization/PerceptronMemento.h>
#include <NeuralNetwork/INeuralLayer.h>
#include <NeuralNetwork/NNException.h>
#include <NeuralNetwork/Neuron/ActivationFunction/SigmoidFunction.h>

#include <algorithm>
#include <cassert>
#include <functional>
#include <vector>

namespace nn {

/*! \class Perceptron
 *  \briefs Contains an input neurons layer one output and one or more hidden layers.
 */
template<typename NeuralLayerType
	,template <typename> class LastOutputFunctionType = SigmoidFunction
	>
class Perceptron {
public:
    /*!
     * Default number of neurons in output neural layer.
     */
    static const unsigned int CONST_DEFAULT_PERCEPTRON_OUTPUTS_NUMBER = 1;
    typedef nn::INeuralLayer < NeuralLayerType > NeuralLayer;
    typedef typename NeuralLayer::Neuron Neuron;
    typedef typename Neuron::OutputFunction OutputFunction;
    typedef typename NeuralLayer::Var Var;
    typedef typename std::vector< NeuralLayer >::const_iterator const_iterator;
    typedef typename std::vector< NeuralLayer >::iterator iterator;
    typedef typename std::vector< NeuralLayer >::reverse_iterator reverse_iterator;
    typedef typename std::vector< NeuralLayer >::const_reverse_iterator const_reverse_iterator;
    typedef PerceptronMemento<Var> Memento;
    
    template<typename VarType>
    using  OutputActivationFunctionType = LastOutputFunctionType<VarType>;
	
private:
    typedef typename Neuron::NeuronType::template rebind< LastOutputFunctionType<Var> >::type OutputNeuronType;
    typedef typename NeuralLayerType::template rebindNeuron< OutputNeuronType >::type OutputLayerType;
    typedef nn::INeuralLayer<OutputLayerType> OutputLayer;
    
private:
    /*!
     * Hidden layers.
     */
    std::vector< NeuralLayer > m_layers;
    
    /**
     * Output layer.
     */
    OutputLayer m_outputLayer;

public:
    /*!
     *
     */
    Perceptron(unsigned int inputsNumber, unsigned int outputsNumber = CONST_DEFAULT_PERCEPTRON_OUTPUTS_NUMBER) : m_outputLayer(inputsNumber, outputsNumber)
    {
        m_layers.reserve( 30 );
        m_layers.emplace_back( inputsNumber, inputsNumber );
    }

    const_iterator cbegin()const {
        return m_layers.begin();
    }

    const_iterator cend()const {
        return m_layers.end();
    }

    iterator begin() {
        return m_layers.begin();
    }
    
    iterator end() {
        return m_layers.end();
    }

    reverse_iterator rbegin() {
        return m_layers.rbegin();
    }

    reverse_iterator rend() {
        return m_layers.rend();
    }

    const_reverse_iterator crbegin()const {
        return m_layers.rbegin();
    }

    const_reverse_iterator crend()const {
        return m_layers.rend();
    }
    
    OutputLayer& getOutputLayer(){
      return m_outputLayer;
    }
    
    unsigned int size()const{
      return m_layers.size();
    }

    void setMemento( const Memento& memento )
    {
        auto layers = memento.getLayers();
        typename std::vector< NeuralLayer > internalLayers;
        internalLayers.reserve( layers.size() );
		std::transform(layers.begin(), layers.end() - 1, std::back_inserter(internalLayers), std::bind(nn::createLayer<NeuralLayer, Var>, std::placeholders::_1));
	m_outputLayer = createLayer< OutputLayer, Var >(*layers.rbegin());
        m_layers = internalLayers;
    }

    Memento getMemento()const
    {
        typename std::vector< NeuralLayerMemento<Var> > layers;
        layers.reserve( m_layers.size() );
		std::transform(m_layers.begin(), m_layers.end(), std::back_inserter(layers), std::bind(&NeuralLayer::getMemento, std::placeholders::_1));
	layers.push_back( m_outputLayer.getMemento() );
        PerceptronMemento<Var> memento;
        memento.setLayers(layers);
        return memento;
    }

    /*!
     * @brief will add a new layer to perceptron.
     * @brief The outputs number should stay the same in that case. Therefore a new layer will replace of of the intermediate layers.
     * @param neuronsNumber a number of neurons in a new layer.
     */
    void addLayer(unsigned int neuronsNumber) {
        assert(!m_layers.empty() && "Perceptron: invalid layers count");
        auto inputsNumber = m_layers.rbegin()->size();
        auto outputsNumber = m_outputLayer.size();
        m_layers.emplace_back( inputsNumber, neuronsNumber );
	m_outputLayer = OutputLayer(neuronsNumber, outputsNumber);
    }

    /*!
     * @brief this method will calculate the outputs of perceptron.
     * @param begin is the iterator which is pointing to the first input
     * @param end the iterator which is pointing to the last input
     * @param out the output iterator where the results of the calculation will be stored.
     */
    template<typename Iterator, typename OutputIterator>
    void calculateOutputs(Iterator begin, Iterator end, OutputIterator out)
    {
        unsigned int inputId = 0;
        while( begin != end ) {
            m_layers[0].setInput(inputId, *begin);
	    begin++;
	    inputId++;
        }

        unsigned int layersNumber = m_layers.size();
        for (unsigned int i = 0; i < layersNumber - 1; i++)
        {
           m_layers[i].calculateOutputs( m_layers[i + 1] );
        }

        m_layers.rbegin()->calculateOutputs( m_outputLayer );
        m_outputLayer.calculateOutputs();
	std::transform(m_outputLayer.begin(), m_outputLayer.end(), out, std::bind(&OutputLayer::Neuron::getOutput, std::placeholders::_1));
    }
    
    /**
     * @brief only for the testing purpose.
     * @brief please don't use this function.
     */
    template<typename Test>
    void supportTest(Test&);

    /*!
     * Destructor
     */
    ~Perceptron() {
    }
};

}

#endif
