/**
*  Copyright (c) 2011, Alex Theodoridis
*  All rights reserved.

*  Redistribution and use in source and binary forms, with
*  or without modification, are permitted provided that the
*  following conditions are met:
*  Redistributions of source code must retain the above
*  copyright notice, this list of conditions and the following disclaimer.
*  Redistributions in binary form must reproduce the above
*  copyright notice, this list of conditions and the following
*  disclaimer in the documentation and/or other materials
*  provided with the distribution.

*  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS
*  AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
*  INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
*  MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
*  IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
*  ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
*  OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
*  PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
*  OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
*  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
*  OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
*  ANY WAY OUT OF THE USE OF THIS SOFTWARE,
*  EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE
*/

#ifndef _HIDDENNEURALLAYER_H
#define	_HIDDENNEURALLAYER_H
#include <NeuralNetwork/LearningAlgorithm/BackPropagation/ANeuralLayer.h>

namespace nn {

namespace bp {

template<class Var>
/**
 * Back-propagation wrapper for a hidden (non-output) layer.
 *
 * A hidden layer computes its deltas from the deltas of the layer it
 * feeds into (the "affected" layer), so it never needs the training
 * samples themselves.
 */
class HiddenNeuralLayer : public ANeuralLayer<Var> {
public:

    /**
     * Wraps the given feed-forward layer for training.
     *
     * @param neuralLayer underlying feed-forward layer being trained.
     * @param varP        parameter forwarded verbatim to ANeuralLayer
     *                    (presumably the learning rate — confirm there).
     */
    HiddenNeuralLayer(const typename utils::SharedPtr<NeuralLayer<Var> >& neuralLayer, const Var& varP) :
            ANeuralLayer<Var> (neuralLayer, varP) {
    }

    /**
     * Computes the back-propagated delta of every neuron in this layer:
     *
     *   delta_j = f'(net_j) * sum_i( delta_i * w_ij )
     *
     * where i ranges over the neurons of the affected (downstream) layer
     * and w_ij is the weight from this neuron j into affected neuron i.
     *
     * @param affectedLayer the layer this one feeds into; its deltas must
     *                      already have been calculated.
     * @param dataSet       unused here — only output layers need target
     *                      samples; kept for a uniform interface.
     * @return false if affectedLayer is null, true otherwise.
     */
    bool calculateLayerDeltas(const typename utils::SharedPtr<INeuralLayer<Var> >& affectedLayer, const NNSamplesSet<Var>* dataSet = NULL) {
        if (affectedLayer == NULL) {
            return false;
        }

        // Invariant for the whole pass — fetch once instead of once per neuron.
        const unsigned int neuronsNumber = affectedLayer->getNeuronsNumber();

        typename std::map<unsigned int, BPNeuron<Var> >::iterator curNeuron;
        for (curNeuron = ANeuralLayer<Var>::_neurons.begin(); curNeuron != ANeuralLayer<Var>::_neurons.end(); ++curNeuron) {
            unsigned int curNeuronId = curNeuron->first;
            Var sum = 0.0f; // sum_i( delta_i * w_i,cur ) over the affected layer
            for (unsigned int i = 0; i < neuronsNumber; i++) {
                Var affectedDelta = affectedLayer->getDelta(i);
                Var affectedWeight = affectedLayer->getInputWeight(i, curNeuronId);
                sum = sum + affectedDelta * affectedWeight;
            }

            // Standard hidden-layer rule: scale by the activation derivative.
            Var differential = curNeuron->second.calculateDifferential();
            Var delta = sum * differential;
            curNeuron->second.setDelta(delta);
        }

        return true;
    }

    virtual ~HiddenNeuralLayer() throw () {

    }
};

}

}

#endif	/* _HIDDENNEURALLAYER_H */

// kate: indent-mode cstyle; space-indent on; indent-width 0; 
