/*
This file is part of cdbnlib.

    cdbnlib is free software: you can redistribute it and/or modify it under
    the terms of the GNU Lesser General Public License as published by the Free
    Software Foundation, either version 3 of the License, or (at your option)
    any later version.

    cdbnlib is distributed in the hope that it will be useful, but WITHOUT ANY
    WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
    FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for
    more details.

    You should have received a copy of the GNU Lesser General Public License
    along with cdbnlib.  If not, see <http://www.gnu.org/licenses/>.
*/


#include "cdbnlib.h"
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <string.h>

// Sigmoid activation: p = 1 / (1 + exp(-bias - input))
#define LOGISTIC_FUNC(bias,input) (1.0/(1.0+exp(-(bias)-(input))))
// Column-major 2D index into the weight matrix; icount is the
// number of visible units (rows), i the visible and j the hidden index
#define INDEX2D(i,j,icount) ((i)+(j)*(icount))
// L2 weight-decay coefficient applied during training
#define WEIGHTCOST 0.0002
// Momentum schedule endpoints for contrastive-divergence training
#define INITIALMOMENTUM 0.5
#define FINALMOMENTUM 0.9
// Uniform random double, intended range [0,1].
// NOTE(review): random() is POSIX and returns values in [0, 2^31-1];
// RAND_MAX is only specified for rand(). This is correct where
// RAND_MAX == 2^31-1 (e.g. glibc) -- confirm on other platforms.
#define RANDUNIFORM (random()/(double)RAND_MAX)

/* Allocate an array of n doubles or abort.
 *
 * msg is the perror() prefix printed when the allocation fails;
 * on failure the process exits with status 2, matching the
 * original per-buffer error handling.
 */
static double *dl_alloc_doubles(size_t n, const char *msg)
{
	double *p = malloc(n * sizeof *p);
	if (p == NULL)
	{
		perror(msg);
		exit(2);
	}
	return p;
}

/* This function creates a new Restricted Boltzmann Machine
 *
 * rbm points to an already allocated struct of the type
 * dl_rbm_t
 *
 * vis points to a previously allocated and created structure
 * of the type dl_layer_t
 *
 * hid points to a previously allocated and created structure
 * of the type dl_layer_t
 *
 * On any allocation failure a diagnostic is printed via perror
 * and the process exits with status 2.
 */
void dl_create_rbm(dl_rbm_t *rbm, dl_layer_t *vis, dl_layer_t *hid)
{
	// Keep pointers to the visible and hidden layers
	rbm->vis = vis;
	rbm->hid = hid;

	// One synapse per (visible, hidden) unit pair
	rbm->weight_count = vis->count * hid->count;

	// Weight matrix and per-weight learning buffers
	rbm->weight     = dl_alloc_doubles(rbm->weight_count, "dl_new_rbm weight malloc error");
	rbm->posprods   = dl_alloc_doubles(rbm->weight_count, "dl_new_rbm posprods malloc error");
	rbm->negprods   = dl_alloc_doubles(rbm->weight_count, "dl_new_rbm negprods malloc error");
	rbm->weight_inc = dl_alloc_doubles(rbm->weight_count, "dl_new_rbm weight_inc malloc error");

	// Per-unit bias increments and activity accumulators
	rbm->hidbiasinc = dl_alloc_doubles(rbm->hid->count, "dl_new_rbm hidbiasinc malloc error");
	rbm->visbiasinc = dl_alloc_doubles(rbm->vis->count, "dl_new_rbm visbiasinc malloc error");
	rbm->poshidact  = dl_alloc_doubles(rbm->hid->count, "dl_new_rbm poshidact malloc error");
	rbm->posvisact  = dl_alloc_doubles(rbm->vis->count, "dl_new_rbm posvisact malloc error");
	rbm->neghidact  = dl_alloc_doubles(rbm->hid->count, "dl_new_rbm neghidact malloc error");
	// BUGFIX: this error message previously read "posvisact"
	// (copy-paste of the earlier allocation)
	rbm->negvisact  = dl_alloc_doubles(rbm->vis->count, "dl_new_rbm negvisact malloc error");

	// Init the weights
	dl_init_rbm(rbm);
}

/* This function creates a new associative layer in a
 * Restricted Boltzmann Machine: one internal RBM per input
 * channel, all connected to the same hidden layer.
 *
 * rbm points to a previously allocated struct of the type
 * dl_rbm_assoc_t
 *
 * vis_list points to an existing list of pointers to
 * existing structs of the type dl_layer_t (one per channel)
 *
 * n is the number of channels
 *
 * hid points to a previously allocated and created structure
 * of the type dl_layer_t
 */
void dl_create_rbm_assoc(dl_rbm_assoc_t *rbm, dl_layer_t **vis_list, int n, dl_layer_t *hid)
{
	int ch;

	// Record the channel count and the per-channel visible layers
	rbm->n = n;
	rbm->vis = vis_list;

	// Table of pointers to the per-channel internal RBMs
	rbm->rbm = (dl_rbm_t **)malloc(n*sizeof(dl_rbm_t *));
	if (rbm->rbm == NULL)
	{
		perror("dl_new_rbm_assoc dl_rbm_t * malloc error");
		exit(2);
	}

	// Build one RBM per channel, each sharing the hidden layer
	for (ch = 0 ; ch < n ; ++ch)
	{
		dl_rbm_t *channel_rbm = (dl_rbm_t *)malloc(sizeof(dl_rbm_t));
		if (channel_rbm == NULL)
		{
			perror("dl_new_rbm_assoc dl_rbm_t malloc error");
			exit(2);
		}
		rbm->rbm[ch] = channel_rbm;
		dl_create_rbm(channel_rbm, vis_list[ch], hid);
	}
}

/* This function initializes the weights of an RBM with small
 * Gaussian noise (standard deviation 0.1).
 *
 * The Box-Muller helper produces two normal deviates per call,
 * so weights are filled pairwise; when the weight count is odd
 * one extra pair is drawn and its second value discarded.
 */
void dl_init_rbm(dl_rbm_t *rbm)
{
	int pair;
	int pairs = rbm->weight_count / 2;
	double *w = rbm->weight;

	for (pair = 0 ; pair < pairs ; ++pair, w += 2)
	{
		dl_get_box_muller_random_normal_2d(w, w+1, 0.1);
	}

	// Odd weight count: keep only the first value of a final pair
	if (rbm->weight_count % 2 != 0)
	{
		double unused;
		dl_get_box_muller_random_normal_2d(w, &unused, 0.1);
	}
}

/* Frees the memory allocated by the structure.
 *
 * Attention, it does not free the structure itself.
 *
 * Each freed pointer is reset to NULL so the struct cannot be
 * used after deletion by accident and a repeated call is a
 * harmless no-op (free(NULL) is defined to do nothing).
 */
void dl_del_rbm(dl_rbm_t *rbm)
{
	free(rbm->weight);      rbm->weight = NULL;
	free(rbm->posprods);    rbm->posprods = NULL;
	free(rbm->negprods);    rbm->negprods = NULL;
	free(rbm->weight_inc);  rbm->weight_inc = NULL;
	free(rbm->hidbiasinc);  rbm->hidbiasinc = NULL;
	free(rbm->visbiasinc);  rbm->visbiasinc = NULL;
	free(rbm->poshidact);   rbm->poshidact = NULL;
	free(rbm->posvisact);   rbm->posvisact = NULL;
	free(rbm->neghidact);   rbm->neghidact = NULL;
	free(rbm->negvisact);   rbm->negvisact = NULL;
}

/* This function updates the probabilities in the hidden layer
 * from a binary sample of the visible layer.
 *
 * sample holds rbm->vis->count 0/1 flags; for each hidden unit
 * the weights of the active visible units are summed and passed
 * through the logistic function together with the unit's bias.
 */
void dl_rbm_update_hid_from_sample(dl_rbm_t *rbm, char *sample)
{
	int vcount = rbm->vis->count;
	int hcount = rbm->hid->count;
	int v, h;

	for (h = 0 ; h < hcount ; ++h)
	{
		// Column h of the (vcount x hcount) weight matrix
		double *col = rbm->weight + h*vcount;
		double acc = 0.0;

		for (v = 0 ; v < vcount ; ++v)
		{
			if (sample[v] != 0)
				acc += col[v];
		}
		rbm->hid->prob[h] = LOGISTIC_FUNC(rbm->hid->bias[h], acc);
	}
}

/* This function updates the probabilities in the hidden layer
 * using the binary sample currently stored in the visible layer.
 */
void dl_rbm_update_hid(dl_rbm_t *rbm)
{
	char *vis_sample = rbm->vis->sample;
	dl_rbm_update_hid_from_sample(rbm, vis_sample);
}

/* This function updates the probabilities in the hidden layer
 * of an associative RBM, feeding each channel's RBM its own
 * visible sample from the sample list.
 */
void dl_rbm_assoc_update_hid_from_sample(dl_rbm_assoc_t *rbm, char **sample)
{
	int ch;

	for (ch = 0 ; ch < rbm->n ; ++ch)
		dl_rbm_update_hid_from_sample(rbm->rbm[ch], sample[ch]);
}

/* This function updates the probabilities in the hidden layer
 * of an associative RBM using each channel's stored visible
 * sample.
 */
void dl_rbm_assoc_update_hid(dl_rbm_assoc_t *rbm)
{
	int ch;

	for (ch = 0 ; ch < rbm->n ; ++ch)
		dl_rbm_update_hid(rbm->rbm[ch]);
}


/* This function updates the probabilities in the visible layer
 * from a binary sample of the hidden layer.
 *
 * sample holds rbm->hid->count 0/1 flags; for each visible unit
 * the weights of the active hidden units are summed and passed
 * through the logistic function together with the unit's bias.
 */
void dl_rbm_update_vis_from_sample(dl_rbm_t *rbm, char *sample)
{
	int vcount = rbm->vis->count;
	int hcount = rbm->hid->count;
	int v, h;

	for (v = 0 ; v < vcount ; ++v)
	{
		double acc = 0.0;

		// Row v of the (vcount x hcount) weight matrix: stride vcount
		for (h = 0 ; h < hcount ; ++h)
		{
			if (sample[h] != 0)
				acc += rbm->weight[v + h*vcount];
		}
		rbm->vis->prob[v] = LOGISTIC_FUNC(rbm->vis->bias[v], acc);
	}
}

/* This function updates the probabilities in the visible layer
 * using the binary sample currently stored in the hidden layer.
 */
void dl_rbm_update_vis(dl_rbm_t *rbm)
{
	char *hid_sample = rbm->hid->sample;
	dl_rbm_update_vis_from_sample(rbm, hid_sample);
}

/* This function updates the probabilities in the visible layers
 * of an associative RBM, feeding each channel's RBM its own
 * hidden sample from the sample list.
 */
void dl_rbm_assoc_update_vis_from_sample(dl_rbm_assoc_t *rbm, char **sample)
{
	int ch;

	for (ch = 0 ; ch < rbm->n ; ++ch)
		dl_rbm_update_vis_from_sample(rbm->rbm[ch], sample[ch]);
}

/* This function updates the probabilities in the visible layers
 * of an associative RBM using each channel's stored hidden
 * sample.
 */
void dl_rbm_assoc_update_vis(dl_rbm_assoc_t *rbm)
{
	int ch;

	for (ch = 0 ; ch < rbm->n ; ++ch)
		dl_rbm_update_vis(rbm->rbm[ch]);
}

/* Reset all the positive and negative phase learning
 * accumulators, including the momentum terms, then clear the
 * per-epoch accumulators as well.
 */
void dl_rbm_reset_all_accumulators(dl_rbm_t *rbm)
{
	size_t wbytes = rbm->weight_count * sizeof(double);

	// Per-weight product accumulators and the momentum buffer
	memset(rbm->posprods,   0, wbytes);
	memset(rbm->negprods,   0, wbytes);
	memset(rbm->weight_inc, 0, wbytes);

	// Per-unit bias momentum buffers
	memset(rbm->visbiasinc, 0, rbm->vis->count * sizeof(double));
	memset(rbm->hidbiasinc, 0, rbm->hid->count * sizeof(double));

	// Activity accumulators and the accumulated error
	dl_rbm_reset_epoch_accumulators(rbm);
}

/* Reset the positive and negative phase activity accumulators
 * and the accumulated reconstruction error to begin another
 * epoch.
 */
void dl_rbm_reset_epoch_accumulators(dl_rbm_t *rbm)
{
	size_t hbytes = rbm->hid->count * sizeof(double);
	size_t vbytes = rbm->vis->count * sizeof(double);

	memset(rbm->poshidact, 0, hbytes);
	memset(rbm->neghidact, 0, hbytes);
	memset(rbm->posvisact, 0, vbytes);
	memset(rbm->negvisact, 0, vbytes);

	rbm->accerr = 0.0;
}

/* This function trains the RBM with one step of contrastive
 * divergence (CD-1) on a single input pattern.
 *
 * input_prob    per-visible-unit probabilities of the training
 *               pattern (rbm->vis->count entries)
 * learning_rate scales the weight and bias updates
 * momentum      fraction of the previous increment carried over
 * reset_all     non-zero: clear every learning accumulator first
 * reset_epoch   non-zero (and reset_all zero): clear only the
 *               per-epoch activity accumulators
 *
 * The squared reconstruction error of this pattern is added to
 * rbm->accerr.
 */
void dl_rbm_train(dl_rbm_t *rbm, double *input_prob, double learning_rate,
			double momentum, char reset_all, char reset_epoch)
{
	int i,j;
	double diff = 0.0;

	// Reset probability accumulators
	if (reset_all != 0)
	{
		dl_rbm_reset_all_accumulators(rbm);
	} else if (reset_epoch != 0)
	{
		dl_rbm_reset_epoch_accumulators(rbm);
	}

	// P O S I T I V E   P H A S E

	// Sample the visible units from the input probabilities and
	// propagate up to the hidden probabilities
	dl_layer_get_sample_with_prob(rbm->vis, input_prob);
	dl_rbm_update_hid(rbm);
	// Accumulate the positive-phase <v_i h_j> statistics
	// for the contrastive divergence
	for (j = 0 ; j < rbm->hid->count ; ++j)
	{
		for (i = 0 ; i < rbm->vis->count ; ++i)
		{
			rbm->posprods[INDEX2D(i,j,rbm->vis->count)]+=
				input_prob[i]*rbm->hid->prob[j];
		}
		rbm->poshidact[j]+=rbm->hid->prob[j];
	}
	for (i = 0 ; i < rbm->vis->count ; ++i)
	{
		rbm->posvisact[i]+=input_prob[i];
	}
	// Sample the hidden states
	dl_layer_get_sample(rbm->hid);

	// N E G A T I V E   P H A S E

	// Reconstruct the visible probabilities from the hidden
	// sample and propagate back up (one Gibbs step)
	dl_rbm_update_vis(rbm);
	dl_rbm_update_hid(rbm);
	// Accumulate the negative-phase <v_i h_j> statistics
	// for the contrastive divergence
	for (j = 0 ; j < rbm->hid->count ; ++j)
	{
		for (i = 0 ; i < rbm->vis->count ; ++i)
		{
			rbm->negprods[INDEX2D(i,j,rbm->vis->count)]+=
				rbm->vis->prob[i]*rbm->hid->prob[j];
		}
		rbm->neghidact[j]+=rbm->hid->prob[j];
	}
	for (i = 0 ; i < rbm->vis->count ; ++i)
	{
		rbm->negvisact[i]+=rbm->vis->prob[i];
		// Track the squared reconstruction error of this pattern
		diff = rbm->vis->prob[i]-input_prob[i];
		rbm->accerr += diff*diff;
	}

	// Update weights (with momentum and L2 weight decay) and biases
	for (i = 0 ; i < rbm->weight_count ; ++i)
	{
		rbm->weight_inc[i] *= momentum;
		rbm->weight_inc[i] +=
			learning_rate*(
				rbm->posprods[i] - rbm->negprods[i] -
					WEIGHTCOST*rbm->weight[i]
			);
		rbm->weight[i] += rbm->weight_inc[i];
	}
	for (i = 0 ; i < rbm->vis->count ; ++i)
	{
		rbm->visbiasinc[i] *= momentum;
		rbm->visbiasinc[i] +=
			learning_rate*(
				rbm->posvisact[i] - rbm->negvisact[i]
			);
		// BUGFIX: the increment was computed but never applied,
		// so the visible biases never changed during training
		rbm->vis->bias[i] += rbm->visbiasinc[i];
	}
	for (j = 0 ; j < rbm->hid->count ; ++j)
	{
		rbm->hidbiasinc[j] *= momentum;
		rbm->hidbiasinc[j] +=
			learning_rate*(
				rbm->poshidact[j] - rbm->neghidact[j]
			);
		// BUGFIX: apply the hidden bias increment (see above)
		rbm->hid->bias[j] += rbm->hidbiasinc[j];
	}
}

/* Runs one contrastive-divergence training step on every channel
 * of an associative RBM.
 *
 * input_prob_list holds one input-probability array per channel;
 * n is the number of channels to train. The remaining parameters
 * are forwarded unchanged to dl_rbm_train for each channel.
 */
void dl_rbm_assoc_train(dl_rbm_assoc_t *rbm, double **input_prob_list, int n,
	double learning_rate,double momentum, char reset_all, char reset_epoch)
{
	int ch;

	for (ch = 0 ; ch < n ; ++ch)
	{
		dl_rbm_train(rbm->rbm[ch], input_prob_list[ch],
			learning_rate, momentum, reset_all, reset_epoch);
	}
}

