/*
 Copyright 2013--Present JMM_PROGNAME
 
 This file is distributed under the terms of the JMM_PROGNAME License.
 
 You should have received a copy of the JMM_PROGNAME License.
 If not, see <JMM_PROGNAME WEBSITE>.
*/
// CREATED    : 10/12/2015
// LAST UPDATE: 10/12/2015

#include "statsxx/machine_learning/neural_network/deep_belief_network/DBN.hpp"

// STL
#include <stdexcept> // std::runtime_error()
#include <vector>    // std::vector<>

// jScience
#include "jScience/linalg/Vector.hpp" // Vector<>

// stats++
#include "statsxx/distribution.hpp" // distribution::binomial()
#include "statsxx/machine_learning/activation_functions.hpp" // activation_function::Logistic


// note: deterministic propagation (in backward propagation) is essentially the mean-field approximation
//
// note: the initial vector x is returned (without sampling) if (layern == layer0)
//
// note: one should NEVER repeatedly call this for approximate stochastic sampling; instead, in the calling routine, one should first perform a deterministic propagation of the signal, followed by local sampling
//
// Propagates the signal x backward (top-down) through the DBN, from layer0 down
// to layern, using the requested propagation scheme.
//
// @param layer0     layer to propagate the signal from (0 <= layern <= layer0 <= RBM.size())
// @param layern     layer to propagate the signal to
// @param prop_type  type of propagation of the signal (stochastic / pseudo_stochastic / deterministic)
// @param x          data (taken by value; on entry, the activations of layer0)
// @return           the activations of layern; if (layern == layer0), x is returned unmodified
// @throws std::runtime_error on invalid layer indices or an unhandled PropagationType
inline Vector<double> neural_network::DBN::prop_back(
                                                     const int layer0,                  // layer to propagate signal from
                                                     const int layern,                  // "                       " to
                                                     // -----
                                                     const PropagationType prop_type,   // type of propagation of signal
                                                     // -----
                                                     Vector<double> x                   // data
                                                     ) const
{
    //=========================================================
    // ERROR CHECKS
    //=========================================================
    
    // << jmm: see JMMs_CPP_style_guide for a note here about error checking >>
    
    if(layern > layer0)
    {
        throw std::runtime_error("error in neural_network::DBN::prop_back(): (layern > layer0)");
    }
    
    if(layern < 0)
    {
        throw std::runtime_error("error in neural_network::DBN::prop_back(): (layern < 0)");
    }
    
    // note: with N RBMs the layers are indexed 0..N, and the loops below access
    // this->RBM[layer0-1]; the previous check (layer0 > RBM.size()+1) permitted
    // layer0 == RBM.size()+1, which would read RBM[RBM.size()] out of bounds.
    // (the cast also avoids a signed/unsigned comparison between int and size_t)
    if(layer0 > static_cast<int>(this->RBM.size()))
    {
        throw std::runtime_error("error in neural_network::DBN::prop_back(): (layer0 > this->RBM.size())");
    }
    
    
    //=========================================================
    // PROPAGATION
    //=========================================================
    
    activation_function::Logistic logistic;
    
    //---------------------------------------------------------
    // STOCHASTIC SAMPLING
    //---------------------------------------------------------
    if(prop_type == neural_network::DBN::PropagationType::stochastic)
    {
        // note: see the notes for deterministic propagation below
        
        // activate, then sample, at EVERY layer on the way down
        for(auto i = (layer0-1); i >= layern; --i)
        {
            x = logistic.f(this->RBM[i].b + transpose(this->RBM[i].W)*x);
            
            x = distribution::binomial<double>(1, x);
        }
    }
    //---------------------------------------------------------
    // PSEUDO-STOCHASTIC SAMPLING
    //---------------------------------------------------------
    else if(prop_type == neural_network::DBN::PropagationType::pseudo_stochastic)
    {
        // note: see the notes for deterministic propagation below
        
        // first compute the deterministic propagation ...
        // note: we do not make a self call to prop_back() to avoid error checks, etc. twice
        for(auto i = (layer0-1); i >= layern; --i)
        {
            x = logistic.f(this->RBM[i].b + transpose(this->RBM[i].W)*x);
        }
        
        // ... and then sample from it
        // note: only if propagation occurred, otherwise return the starting vector x
        if(layer0 != layern)
        {
            x = distribution::binomial<double>(1, x);
        }
    }
    //---------------------------------------------------------
    // DETERMINISTIC SAMPLING
    //---------------------------------------------------------
    else if(prop_type == neural_network::DBN::PropagationType::deterministic)
    {
        // note: we start by assuming that x is the hidden activations of the RBM of (layer0-1)
        
        // note: the following loop then handles all cases (general layer0, plus layer0 == 1 or 0) 
        
        for(auto i = (layer0-1); i >= layern; --i)
        {
            // note: [nh x nv]^T * [nh x 1] = [nv x 1]
            x = logistic.f(this->RBM[i].b + transpose(this->RBM[i].W)*x);
        }
    }
    //---------------------------------------------------------
    // UNHANDLED PROPAGATION TYPE
    //---------------------------------------------------------
    else
    {
        // note: previously an unrecognized PropagationType silently returned x
        // unchanged; fail loudly instead so a future enum addition is not missed
        throw std::runtime_error("error in neural_network::DBN::prop_back(): unhandled PropagationType");
    }
    
    
    return x;
}
