/**
*  Copyright (c) 2011, Alex Theodoridis
*  All rights reserved.

*  Redistribution and use in source and binary forms, with 
*  or without modification, are permitted provided that the 
*  following conditions are met:
*  Redistributions of source code must retain the above 
*  copyright notice, this list of conditions and the following disclaimer.
*  Redistributions in binary form must reproduce the above 
*  copyright notice, this list of conditions and the following
*  disclaimer in the documentation and/or other materials 
*  provided with the distribution.

*  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS
*  AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
*  INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
*  MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
*  IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
*  ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
*  OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 
*  PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
*  OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 
*  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
*  OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
*  ANY WAY OUT OF THE USE OF THIS SOFTWARE,
*  EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE
*/

#ifndef PerceptronH
#define PerceptronH

#include <NeuralNetwork/Perceptron/NeuralLayer/NeuralLayer.h>
#include <NeuralNetwork/Perceptron/NeuralLayer/InputLayer.h>
#include <NeuralNetwork/Serialization/IPerceptronStream.h>
#include <Utilities/SmartPtr/SharedPtr.h>
#include <NeuralNetwork/Serialization/PerceptronMemento.h>
#include <NeuralNetwork/INeuralNetwork.h>

namespace nn {

    /*! \class PerceptronNeuralNetwork
     *  \brief Multi-layer perceptron: an input neurons layer, zero or more
     *  hidden layers and one output layer, wired sequentially. The activation
     *  equations of every layer are produced by the supplied equation factory.
     */
    template<class Var>
    class PerceptronNeuralNetwork : public INeuralNetwork< Var >{
    public:
        /*!
         * Default number of neurons in the output neural layer.
         */
        static const unsigned int CONST_DEFAULT_PERCEPTRON_OUTPUTS_NUMBER;

    private:
        /*!
         * Hidden layers, keyed by their 0-based position in the network.
         */
        std::map<unsigned int, utils::SharedPtr<NeuralLayer<Var> > > _layers;

        /*! Input layer
         *  \brief Layer that represents the input neurons.
         */
        utils::SharedPtr<InputLayer<Var> > _inputLayer;

        /*!
         * Output layer; fed by the last hidden layer, or directly by the
         * input layer when no hidden layer has been added.
         */
        utils::SharedPtr<NeuralLayer<Var> > _outputLayer;

        /*!
         * Factory used to create the activation equations of every layer.
         */
        const utils::SharedPtr<IEquationFactory<Var> > _equationFactory;

        /*!
         * Cached snapshot returned by getMemento(); mutable because taking a
         * snapshot does not logically modify the network.
         */
        mutable PerceptronMemento<Var> _lastMemento;

    public:

        /*! Constructor.
         *  \param inputsNumber number of input neurons; must be >= 1.
         *  \param equationFactory factory producing the layers' equations.
         *  \param outputsNumber number of output neurons; must be >= 1.
         *  \throw NNException when inputsNumber or outputsNumber is invalid.
         */
        PerceptronNeuralNetwork(unsigned int inputsNumber, const utils::SharedPtr<IEquationFactory<Var> >& equationFactory,
                unsigned int outputsNumber = CONST_DEFAULT_PERCEPTRON_OUTPUTS_NUMBER) : _equationFactory(equationFactory) {
            if (inputsNumber < 1) {
                throw NNException("Wrong argument inputsNumber", __FILE__, __LINE__);
            }

            // Reject a degenerate network with no outputs, mirroring the
            // inputsNumber validation above.
            if (outputsNumber < 1) {
                throw NNException("Wrong argument outputsNumber", __FILE__, __LINE__);
            }

            _inputLayer = new InputLayer<Var > (inputsNumber);
            _outputLayer = new NeuralLayer<Var > (inputsNumber, outputsNumber, equationFactory);
        }

        /*! Assigns a value to one input neuron.
         *  \param inputId identifier of the input neuron.
         *  \param inputValue value to assign.
         *  \return true when the input layer accepted the value.
         */
        bool setInput(unsigned int inputId, const Var& inputValue) {
            return _inputLayer->setValue(inputId, inputValue);
        }

        /*! Gives access to one hidden layer.
         *  \param layerId 0-based identifier of the hidden layer.
         *  \return reference to the requested layer.
         *  \throw NNException when no layer with the given id exists.
         */
        const utils::SharedPtr<NeuralLayer<Var> >& getLayer(unsigned int layerId) {
            typename std::map<unsigned int, utils::SharedPtr<NeuralLayer<Var> > >::iterator curLayer;
            curLayer = _layers.find(layerId);
            if (curLayer == _layers.end()) {
                throw NNException("Wrong argument layerId", __FILE__, __LINE__);
            }

            return curLayer->second;
        }

        /*! Reads the output of one output-layer neuron.
         *  \param outputId identifier of the output neuron.
         *  \return the neuron's current output value.
         */
        Var getOutput(unsigned int outputId) const{
            return _outputLayer->getNeuron(outputId)->getOutput();
        }

        /*! GetOutputsNumber method
         *  \return a number of neurons in output neural layer.
         */
        unsigned int getOutputsNumber(void) const {
            return _outputLayer->getNeuronsNumber();
        }

        /*!
         *  \return the number of input neurons.
         */
        unsigned int getInputsNumber(void) const {
            return _inputLayer->getInputsNumber();
        }

        /*!
         *  \return the number of hidden layers.
         */
        unsigned int getLayersNumber(void) const {
            return static_cast<unsigned int>(_layers.size());
        }

        /*! Reads the value currently assigned to one input neuron.
         *  \param inputId identifier of the input neuron.
         *  \return the input value.
         */
        Var getInput(unsigned int inputId) const {
            // NOTE(review): InputLayer exposes this with a capitalized name
            // ("GetInput") unlike its other accessors — verify against the
            // InputLayer header if this ever fails to compile.
            return _inputLayer->GetInput(inputId);
        }

        /*!
         *  \return the output layer.
         */
        const utils::SharedPtr<NeuralLayer<Var> >& getOutputLayer(void) const {
            return _outputLayer;
        }

        /*! Restores the network state from a memento.
         *  New hidden layers are built aside first; the live network is only
         *  replaced when every hidden layer was restored successfully.
         *  \param memento snapshot previously produced by getMemento().
         *  \return true when the complete state, including the output layer,
         *  was restored.
         */
        bool setMemento( const PerceptronMemento<Var>& memento ){
            bool result = true;
            std::map< unsigned int, NeuralLayerMemento<Var> > layers = memento.getLayers();
            typename std::map< unsigned int, NeuralLayerMemento<Var> >::iterator curLayer = layers.begin();

            if( curLayer != layers.end() ){
                // The first hidden layer determines the new input layer size.
                unsigned int inputsNumber = curLayer->second.getInputsNumber();

                std::map<unsigned int, utils::SharedPtr<NeuralLayer<Var> > > internalLayers;
                for( curLayer = layers.begin(); curLayer != layers.end(); ++curLayer ){
                    utils::SharedPtr<NeuralLayer<Var> > layer( new NeuralLayer<Var>(curLayer->second.getInputsNumber(), curLayer->second.getNeuronsNumber(), _equationFactory) );
                    internalLayers.insert( std::make_pair(curLayer->first, layer ) );
                    result &= layer->setMemento( curLayer->second );
                }

                if( result ){
                    _inputLayer = new InputLayer<Var>(inputsNumber);
                    _layers = internalLayers;
                    // BUG FIX: the return value of the output-layer restore was
                    // previously ignored; a failure here must reach the caller.
                    result &= _outputLayer->setMemento(memento.getOutputLayer());
                }
            }

            return result;
        }

        /*! Takes a snapshot of the whole network state.
         *  \return reference to an internally cached memento; valid until the
         *  next getMemento() call or the destruction of the network.
         */
        const PerceptronMemento<Var>& getMemento()const{
            std::map< unsigned int, NeuralLayerMemento<Var> > layers;
            typename std::map< unsigned int, utils::SharedPtr< NeuralLayer<Var> > >::const_iterator curNeuralLayer;
            for ( curNeuralLayer = _layers.begin(); curNeuralLayer != _layers.end() ; ++curNeuralLayer ){
                layers.insert( std::make_pair(curNeuralLayer->first, curNeuralLayer->second->getMemento() ) );
            }

            _lastMemento.setLayers(layers);
            _lastMemento.setOutputLayer( _outputLayer->getMemento() );
            return _lastMemento;
        }

        /*! Appends a hidden layer in front of the output layer.
         *  The new layer is fed by the previously last hidden layer (or by the
         *  input layer when it is the first one); the output layer is rebuilt
         *  so that its inputs match the new layer's neurons.
         *  \param neuronsNumber number of neurons in the new layer; must be >= 1.
         *  \return true when the layer was added.
         */
        bool addLayer(unsigned int neuronsNumber) {
            // A layer without neurons would disconnect the network.
            if (neuronsNumber < 1) {
                return false;
            }

            unsigned int inputsNumber = 0;
            if (_layers.empty()) {
                inputsNumber = _inputLayer->getInputsNumber();
            } else {
                inputsNumber = _layers.rbegin()->second->getNeuronsNumber();
            }

            utils::SharedPtr<NeuralLayer<Var> > neuralLayer = new NeuralLayer<Var > (inputsNumber, neuronsNumber, _equationFactory);
            // Keys are sequential, so size() is always the next free id.
            bool result = _layers.insert( std::make_pair(static_cast<unsigned int>(_layers.size()), neuralLayer) ).second;

            if (result) {
                // Keep the output layer consistent with the new topology.
                _outputLayer = new NeuralLayer<Var > (neuralLayer->getNeuronsNumber(), _outputLayer->getNeuronsNumber(), _equationFactory);
            }

            return result;
        }

        /*! Propagates the current input values through every layer.
         *  \return true when every layer calculated successfully.
         */
        bool calculateOutputs() {
            if (_layers.empty()) {
                // BUG FIX: the no-hidden-layer path previously skipped the
                // final output-layer calculation that the general path below
                // performs; the two paths are now symmetric.
                return _inputLayer->calculateLayer(_outputLayer) && _outputLayer->calculateLayer();
            }

            bool result = false;
            if (_inputLayer->calculateLayer(_layers.begin()->second)) {
                result = true;
                unsigned int layersNumber = static_cast<unsigned int>(_layers.size());
                for (unsigned int i = 0; i < layersNumber; i++) {
                    // BUG FIX: accumulate with &= — previously each iteration
                    // overwrote the flag, so a failure in a middle layer was
                    // masked by a later success.
                    if (i < layersNumber - 1) {
                        result &= _layers[i]->calculateLayer(_layers[i + 1].getPtr());
                    } else {
                        result &= _layers[i]->calculateLayer(_outputLayer.getPtr());
                    }
                }

                result &= _outputLayer->calculateLayer();
            }

            return result;
        }

        /*!
         * Destructor
         */
        virtual ~PerceptronNeuralNetwork() throw () {
        }
    };

    // Out-of-class definition of the template's static default: a freshly
    // constructed perceptron has a single output neuron unless the caller
    // overrides the outputsNumber constructor argument.
    template<class Var>
    const unsigned int PerceptronNeuralNetwork<Var>::CONST_DEFAULT_PERCEPTRON_OUTPUTS_NUMBER = 1;

}

#endif
