/**
*  Copyright (c) 2011, Alex Theodoridis
*  All rights reserved.

*  Redistribution and use in source and binary forms, with 
*  or without modification, are permitted provided that the 
*  following conditions are met:
*  Redistributions of source code must retain the above 
*  copyright notice, this list of conditions and the following disclaimer.
*  Redistributions in binary form must reproduce the above 
*  copyright notice, this list of conditions and the following
*  disclaimer in the documentation and/or other materials 
*  provided with the distribution.

*  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS
*  AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
*  INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
*  MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
*  IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
*  ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
*  OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 
*  PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
*  OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 
*  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
*  OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
*  ANY WAY OUT OF THE USE OF THIS SOFTWARE,
*  EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#ifndef BEPTrainerH
#define BEPTrainerH
#include <NeuralNetwork/Perceptron/PerceptronNeuralNetwork.h>
#include <NeuralNetwork/LearningAlgorithm/BackPropagation/NNSamplesSet.h>
#include <NeuralNetwork/LearningAlgorithm/BackPropagation/HiddenNeuralLayer.h>
#include <NeuralNetwork/LearningAlgorithm/IAlgorithmListener.h>
#include <NeuralNetwork/LearningAlgorithm/BackPropagation/OutputNeuralLayer.h>
#include <math.h>   /* sqrt */
#include <stdlib.h> /* rand — previously relied on transitively */
#include <queue>
#include <list>
#include <algorithm>

namespace nn {

    namespace bp {

        /**
         *  Error back-propagation (BEP) trainer for a perceptron network.
         *
         *  Every layer of the supplied perceptron is wrapped with a
         *  back-propagation adapter (HiddenNeuralLayer for the hidden layers,
         *  OutputNeuralLayer for the output layer). Training repeats epochs —
         *  each sample presented once in random order — until the mean epoch
         *  error drops to the caller-supplied threshold.
         *
         *  @tparam Var numeric type used for signals, weights and errors.
         */
        template<class Var>
        class BepAlgorithm {
        private:
            // Adapter around the perceptron's output layer.
            typename utils::SharedPtr<INeuralLayer<Var> > _outputLayer;
            // Hidden-layer adapters keyed by layer index (0 = input side).
            typename std::map<unsigned int, utils::SharedPtr<INeuralLayer<Var> > > _layers;
            // Network under training (shared with the caller).
            typename utils::SharedPtr<nn::PerceptronNeuralNetwork<Var> > _perceptron;
            // Non-owning epoch observers; this class never deletes them.
            typename std::list< IAlgorithmListener<Var>* > _listeners;

        public:

            /**
             *  Builds a training adapter for every layer of @a perceptron.
             *
             *  @param perceptron network to train; must be non-NULL and
             *                    contain at least one layer.
             *  @param varP       learning parameter forwarded to each layer
             *                    adapter (presumably the learning rate — its
             *                    semantics are defined by the layer classes).
             *  @throws NNException on a NULL/empty perceptron or on an
             *                    allocation/insertion failure.
             */
            BepAlgorithm(const utils::SharedPtr<nn::PerceptronNeuralNetwork<Var> >& perceptron, float varP) {
                if (perceptron == NULL) {
                    throw NNException("Wrong perceptron pointer", __FILE__, __LINE__);
                }

                unsigned int layersNumber = perceptron->getLayersNumber();
                if (layersNumber == 0) {
                    throw NNException("Wrong perceptron argument", __FILE__, __LINE__);
                }

                for (unsigned int i = 0; i < layersNumber; i++) {
                    typename utils::SharedPtr<INeuralLayer<Var> > newLayer = new HiddenNeuralLayer<Var > (perceptron->getLayer(i), varP);
                    if (newLayer == NULL) {
                        throw NNException("Memory allocation error", __FILE__, __LINE__);
                    }

                    // FIX: corrected the "Cant inset" typo in the error text.
                    if (_layers.insert( std::make_pair(i, newLayer) ).second == false) {
                        throw NNException("Can't insert entry to layersList", __FILE__, __LINE__);
                    }
                }

                _outputLayer = new OutputNeuralLayer<Var > (perceptron->getOutputLayer(), varP);
                _perceptron = perceptron;
            }

            /**
             *  Executes one forward/backward pass for a single sample set:
             *  loads the inputs, computes the outputs, back-propagates the
             *  deltas, updates the weights and re-evaluates the network.
             *
             *  @param dataSet sample to train on; must be non-NULL.
             *  @return root of the summed squared output error measured after
             *          the weight update.
             *  @throws NNException on a NULL sample set or any layer failure.
             */
            Var executeTrainingStep(const NNSamplesSet<Var>* dataSet = NULL){
                if (dataSet == NULL) {
                    throw NNException("Wrong argument dataSet==NULL", __FILE__, __LINE__);
                }

                // Feed the sample's inputs into the network.
                unsigned int inputsNumber = dataSet->getInputsNumber();
                for (unsigned int i = 0; i < inputsNumber; i++) {
                    if (!_perceptron->setInput(i, dataSet->getInput(i))) {
                        throw NNException("Step execution failed", __FILE__, __LINE__);
                    }
                }

                // Forward pass with the current weights.
                if (!_perceptron->calculateOutputs()) {
                    throw NNException("Step execution failed", __FILE__, __LINE__);
                }

                // Output-layer deltas come straight from the expected values.
                if (!_outputLayer->calculateLayerDeltas(NULL, dataSet)) {
                    throw NNException("Step execution failed", __FILE__, __LINE__);
                }

                // Back-propagate deltas from the last hidden layer towards the
                // input: the last hidden layer uses the output layer as its
                // successor, every other layer uses the layer above it.
                // FIX: the return value of the last hidden layer's call was
                // previously ignored; a failure there is now reported too.
                int lastLayerId = _layers.size() - 1;
                for (int i = lastLayerId; i >= 0; i--) {
                    typename utils::SharedPtr<INeuralLayer<Var> > nextLayer =
                        (i == lastLayerId) ? _outputLayer : _layers[i + 1];
                    if (!_layers[i]->calculateLayerDeltas(nextLayer, dataSet)) {
                        throw NNException("Step execution failed", __FILE__, __LINE__);
                    }
                }

                // Apply the weight corrections, hidden layers first.
                typename std::map<unsigned int, utils::SharedPtr<INeuralLayer<Var> > >::iterator layer;
                for (layer = _layers.begin(); layer != _layers.end(); layer++) {
                    if (!layer->second->calculateLayerWeights()) {
                        throw NNException("Step execution failed", __FILE__, __LINE__);
                    }
                }

                if (!_outputLayer->calculateLayerWeights()) {
                    throw NNException("Step execution failed", __FILE__, __LINE__);
                }

                // Re-evaluate the network so the error reflects the update.
                if (!_perceptron->calculateOutputs()) {
                    throw NNException("Step execution failed", __FILE__, __LINE__);
                }

                // Error = sqrt(sum of squared differences over all outputs).
                unsigned int outputsNumber = dataSet->getOutputsNumber();
                Var sum = 0;
                for (unsigned int i = 0; i < outputsNumber; i++) {
                    Var error = _perceptron->getOutput(i) - dataSet->getOutput(i);
                    error = error * error;
                    sum = sum + error;
                }

                return sqrt(sum);
            }

            /**
             *  Trains the network until the mean epoch error is <= maxError.
             *
             *  Each epoch presents every sample set exactly once in a random
             *  order (rand()-based shuffle), accumulates the per-step errors
             *  and averages them over the epoch. Registered listeners are
             *  notified with the epoch error after every epoch.
             *
             *  @param maxError threshold that ends the training loop.
             *  @param dataSets training samples; must be non-empty and every
             *                  set must match the perceptron's input/output
             *                  counts.
             *  @throws NNException on invalid data sets or a failed step.
             */
            void startTraining(float maxError, const typename std::vector<NNSamplesSet<Var> >& dataSets){
                if (dataSets.empty()) {
                    throw NNException("Algorithm error", __FILE__, __LINE__ );
                }

                // Every sample set has to match the network's geometry.
                unsigned int inputsNumber = _perceptron->getInputsNumber();
                unsigned int outputsNumber = _perceptron->getOutputsNumber();
                typename std::vector<NNSamplesSet<Var> >::const_iterator curData;
                for (curData = dataSets.begin(); curData != dataSets.end(); curData++) {
                    if (inputsNumber != curData->getInputsNumber()) {
                        throw NNException("Algorithm error", __FILE__, __LINE__ );
                    }

                    if (outputsNumber != curData->getOutputsNumber()) {
                        throw NNException("Algorithm error", __FILE__, __LINE__ );
                    }
                }

                Var error = maxError + 1.0f; // force at least one epoch
                do {
                    // Shuffle the samples into a processing queue.
                    typename std::vector<NNSamplesSet<Var> > dataSetPool = dataSets;
                    typename std::queue<NNSamplesSet<Var> > dataSetQueue;
                    while (dataSetPool.size() > 0) {
                        unsigned int dataSetPos = rand() % dataSetPool.size();
                        dataSetQueue.push(dataSetPool[dataSetPos]);
                        dataSetPool.erase(dataSetPool.begin() + dataSetPos);
                    }

                    Var errorSum = 0;
                    while (dataSetQueue.size() > 0) {
                        NNSamplesSet<Var> dataSet = dataSetQueue.front();
                        errorSum += executeTrainingStep(&dataSet);
                        dataSetQueue.pop();
                    }

                    // Mean error over the epoch.
                    Var size = float(dataSets.size());
                    error = errorSum / size;

                    // Tell the observers how the epoch went.
                    typename std::list< IAlgorithmListener<Var>* >::iterator it;
                    for( it = _listeners.begin(); it != _listeners.end(); ++it ){
                        (*it)->onChangeEpoch( _perceptron, error );
                    }

                } while (error > maxError);
            }

            /**
             *  Registers an epoch listener. The pointer is stored without
             *  taking ownership; NULL and duplicates are rejected.
             *
             *  @param listener observer to notify after every epoch.
             *  @return true when the listener was added, false when it was
             *          NULL or already registered.
             *
             *  FIX: the method previously always returned false, even after a
             *  successful registration.
             */
            bool addListener( IAlgorithmListener<Var>* listener) {
                bool result = false;
                if( listener != NULL ){
                    if( std::find( _listeners.begin(), _listeners.end(), listener ) == _listeners.end() ){
                        _listeners.push_back(listener);
                        result = true;
                    }
                }

                return result;
            }

            // Layers are held through SharedPtr; listeners are not owned.
            virtual ~BepAlgorithm() throw () {
            }
        };
        //---------------------------------------------------------------------------
        //---------------------------------------------------------------------------

    }

}

#endif
