/*
 Copyright 2013--Present JMM_PROGNAME
 
 This file is distributed under the terms of the JMM_PROGNAME License.
 
 You should have received a copy of the JMM_PROGNAME License.
 If not, see <JMM_PROGNAME WEBSITE>.
*/
// CREATED    : 10/12/2015
// LAST UPDATE: 10/12/2015

#include "statsxx/machine_learning/neural_network/deep_belief_network/DBN.hpp"

// jScience
#include "jScience/linalg/Vector.hpp" // Vector<>
#include "jrandnum.hpp"               // rand_num_uniform_Mersenne_twister()


// generate a sample from the distribution of patterns learned (generative sampling)
// generate a sample from the distribution of patterns learned (generative sampling)
//
// procedure:
//   1. obtain an activation vector at the visible layer of the topmost RBM
//      (either by propagating the supplied x upward, or by drawing a uniform
//      random vector directly at that layer when x is empty)
//   2. run K iterations of Gibbs sampling in the topmost RBM
//   3. propagate the result back down to the first (bottom) visible layer
//
// mean_field : use the mean-field approximation inside the Gibbs step
// K          : number of Markov (Gibbs) iterations at the topmost RBM
// x          : initial vector (optional -- if empty, a random activation vector
//              at the top RBM is generated)
// returns    : the generated sample, expressed in the bottom visible layer
inline Vector<double> neural_network::DBN::sample(
                                                  const bool mean_field,   // mean-field approximation
                                                  // -----  
                                                  const int K,             // number of Markov iterations
                                                  // -----                                                  
                                                  Vector<double> x         // initial vector (optional -- otherwise, a random activation vector at the top RBM is generated) 
                                                  ) const
{
    // note: both forward and backward, deterministic propagation will be used 
    const neural_network::DBN::PropagationType prop_type = neural_network::DBN::PropagationType::deterministic;
    
    // global layer number of the visible layer of the topmost RBM
    const int layer = this->RBM.size() - 1;
    
    // << jmm: need to change this to random hidden vector >>
    // if an initial vector was not passed, generate a random sample directly at the topmost RBM ...
    // << jmm: is this better than just letting someone pass an initial vector? does it have a better chance of randomness AT the topmost RBM? >>
    // << jmm: the following is kind of a hack .. it should better be "if(x == Vector<double>())" >> 
    if(x.size() == 0)
    {
        x = Vector<double>(this->RBM.back().get_nvis());
        
        // uniform random activation in [0, 1) for each visible unit
        // (decltype keeps the index type matched to size(), avoiding a
        //  signed/unsigned comparison)
        for(decltype(x.size()) i = 0; i < x.size(); ++i)
        {
            x(i) = rand_num_uniform_Mersenne_twister(0.0, 1.0);
        }
    }
    // ... else propagate the provided vector to the topmost RBM
    else
    {
        x = this->activations(
                              layer,
                              prop_type,
                              x
                              );
    }
    
    // perform Gibbs sampling at the topmost RBM
    x = this->RBM.back().Gibbs(
                               K,
                               x,
                               mean_field
                               );
    
    // propagate the result back to the first visible layer
    x = this->prop_back(
                        layer,
                        0,
                        prop_type,
                        x
                        );
    
    return x;
}
