#include <time.h>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <vector>

#define uint unsigned int

#define NUM_SAMPLES 2
#define WEIGHT_STEP 0.1
#define ACCEPTABLE_ERROR 0.01

#define INITIAL_TEMP	200.0
#define FINAL_TEMP		1.0
#define TEMP_FACTOR		0.99

#define NUM_ANNEALINGS_PER_SAMPLE	100
#define	NUM_SAMPLES_FOR_AVERAGE		100

using namespace std;


// An (undirected) weighted connection between two neurons, identified
// by the indices of its endpoints in the global `neurons` vector.
class Synapse
{
public:
	int to;
	int from;
	double weight;

	// Defaults produce a detached synapse (-1 endpoints) of weight zero.
	Synapse(int _from = -1, int _to = -1, double _weight = 0.0)
		: to(_to), from(_from), weight(_weight)
	{
	}
};


// A binary stochastic unit. Visible units are clamped to training data
// during the positive phase; hidden units are always free-running.
class Neuron
{
public:
	int is_visible;				// nonzero when the unit is part of the data layer
	double bias;
	double value;				// current activation, 0.0 or 1.0
	vector<int> synapsis_ids;	// NOTE(review): never populated in this file

	Neuron(bool _is_visible = false, double _bias = 0.0, double _value = 0.0)
		: is_visible(_is_visible), bias(_bias), value(_value)
	{
	}
};


// Network topology; filled in once by build_achitecture().
vector<Neuron> neurons;
vector<Synapse> synapsis;

// Activation statistics gathered during the positive (visible units
// clamped to data) and negative (free-running) phases. Allocated in
// build_achitecture(): the synapse arrays hold synapsis.size() entries,
// the bias arrays hold neurons.size() entries. Never freed (live for
// the whole program run).
double *positive_synapses_statistics;
double *negative_synapses_statistics;
double *positive_bias_statistics;
double *negative_bias_statistics;

// Training set: NUM_SAMPLES patterns, 2 visible values per pattern.
double data[NUM_SAMPLES][2];


void
read_data()
{
	data[0][0] = 0.0; data[0][1] =  1.0;
	data[1][0] = 1.0; data[1][1] =  0.0;
}


// Build the fixed 5-neuron topology (units 0 and 4 visible, 1-3 hidden),
// wire up the 8 synapses, and allocate the zero-initialized statistics
// accumulators sized to match the topology.
void
build_achitecture()
{
	static const bool visibility[] = { true, false, false, false, true };
	static const int endpoints[][2] = {
		{ 0, 1 }, { 1, 2 }, { 2, 3 }, { 3, 4 },
		{ 0, 2 }, { 0, 3 }, { 1, 4 }, { 2, 4 }
	};
	uint n;

	for (n = 0; n < sizeof(visibility) / sizeof(visibility[0]); n++)
		neurons.push_back(Neuron(visibility[n]));

	for (n = 0; n < sizeof(endpoints) / sizeof(endpoints[0]); n++)
		synapsis.push_back(Synapse(endpoints[n][0], endpoints[n][1]));

	// calloc so every accumulator starts at 0.0.
	positive_synapses_statistics = (double*) calloc (synapsis.size(), sizeof(double)); 
	negative_synapses_statistics = (double*) calloc (synapsis.size(), sizeof(double)); 
	positive_bias_statistics = (double*) calloc (neurons.size(), sizeof(double)); 
	negative_bias_statistics = (double*) calloc (neurons.size(), sizeof(double)); 
}


void
print_neurons()
{
	uint i;
	
	for (i = 0; i < neurons.size(); i++)
		printf("%0.2lf, ", neurons[i].value);
		
	printf("\n");
}


void
print_biases()
{
	uint i;
	
	for (i = 0; i < neurons.size(); i++)
		printf("%0.2lf, ", neurons[i].bias);
		
	printf("\n");
}


void
print_synapses()
{
	uint i;
	
	for (i = 0; i < synapsis.size(); i++)
		printf("%0.2lf, ", synapsis[i].weight);
		
	printf("\n");
}


// Reset every neuron to a uniformly random binary state (0.0 or 1.0).
void
randomize_neurons()
{
	uint n;

	for (n = 0; n < neurons.size(); n++)
		neurons[n].value = (double) (rand() % 2);
}


void
randomize_bias()
{
	uint i;
	
	for (i = 0; i < neurons.size(); i++)
		// neurons[i].bias = ((double) ((double) rand() / (double) RAND_MAX) * 2.0 - 1.0);
		neurons[i].bias = 0.0;
}


void
randomize_synapses()
{
	uint i;

	for (i = 0; i < synapsis.size(); i++)
		// synapsis[i].weight = ((double) ((double) rand() / (double) RAND_MAX) * 2.0 - 1.0);
		synapsis[i].weight = 0.0;
}


// Create the topology first, then reset neuron states, biases and
// weights (the randomize_* calls require the vectors to be populated).
void
build_network()
{
	build_achitecture();
	randomize_neurons();
	randomize_bias();
	randomize_synapses();
}


// Sigmoid (logistic) function with temperature T: 1 / (1 + e^(-t/T)).
// Higher T flattens the curve toward 0.5; lower T sharpens it.
double
logit(double t, double T)
{
	double e = exp(-t / T);

	return 1.0 / (1.0 + e);
}


double
evaluate_neuron_logit_function(int neuron_id, double T)
{
	uint i;
	double value = 0;

	for (i = 0; i < synapsis.size(); i++)
	{
		if (synapsis[i].from == neuron_id)
			value += (synapsis[i].weight * neurons[synapsis[i].to].value);
		else if (synapsis[i].to == neuron_id)
			value += (synapsis[i].weight * neurons[synapsis[i].from].value);
	}

	value += neurons[neuron_id].bias;
	return (logit(value, T));
}


// Draw a binary sample: returns 1.0 with probability `logit`
// (u ~ U[0,1] is compared against the firing probability).
double
logit_sample(double logit)
{
	double u = (double) rand() / (double) RAND_MAX;

	return (u > logit) ? 0.0 : 1.0;
}


// In-place Fisher-Yates shuffle of the first `size` ints in `vector`.
void
permute(int *vector, int size)
{
	int i, j, tmp;

	for (i = size - 1; i >= 1; i--)
	{
		// Pick a random position in [0, i] and swap it with position i.
		j = rand() % (i + 1);
		tmp = vector[j];
		vector[j] = vector[i];
		vector[i] = tmp;
	}
}


// Perform one Gibbs-sampling sweep over the HIDDEN units at temperature
// T: visit all neurons in random order and resample each hidden unit
// from its conditional (sigmoid) distribution. Visible units are left
// untouched (they stay clamped to the training data).
void
run_hidden_net(double T)
{
	uint i;
	int selected;
	double logit;
	// FIX: `int seq[neurons.size()]` was a variable-length array, which
	// is a GCC extension and not valid standard C++; use std::vector.
	vector<int> neuron_sequence(neurons.size());

	for (i = 0; i < neurons.size(); i++)
		neuron_sequence[i] = i;

	permute(neuron_sequence.data(), neurons.size());

	for (i = 0; i < neurons.size(); i++)
	{
		selected = neuron_sequence[i];
		if (!neurons[selected].is_visible)
		{
			logit = evaluate_neuron_logit_function(selected, T);
			neurons[selected].value = logit_sample(logit);
		}
	}
}


// Perform one Gibbs-sampling sweep over ALL units (visible and hidden)
// at temperature T, visiting the neurons in random order. Used during
// the negative (free-running) phase and when sampling the trained net.
void
run_full_net(double T)
{
	uint i;
	int selected;
	double logit;
	// FIX: `int seq[neurons.size()]` was a variable-length array, which
	// is a GCC extension and not valid standard C++; use std::vector.
	vector<int> neuron_sequence(neurons.size());

	for (i = 0; i < neurons.size(); i++)
		neuron_sequence[i] = i;

	permute(neuron_sequence.data(), neurons.size());

	for (i = 0; i < neurons.size(); i++)
	{
		selected = neuron_sequence[i];
		logit = evaluate_neuron_logit_function(selected, T);
		neurons[selected].value = logit_sample(logit);
	}
}


void
update_synapses_statistics(double *statistics)
{
	uint i;
	int to;
	int from;
	
	for (i = 0; i < synapsis.size(); i++)
	{
		to = synapsis[i].to;
		from = synapsis[i].from;
		statistics[i] += (neurons[from].value * neurons[to].value);
	}
}


void
update_neurons_statistics(double *statistics)
{
	uint i;
	
	for (i = 0; i < neurons.size(); i++)
		statistics[i] += neurons[i].value;
}


void
init_statistics(double *synapses_statistics, double *bias_statistics)
{
	uint i;

	for (i = 0; i < synapsis.size(); i++)
		synapses_statistics[i] = 0.0;
	for (i = 0; i < neurons.size(); i++)
		bias_statistics[i] = 0.0;
}


void
sumarize_statistics(double *synapses_statistics, double *bias_statistics)
{
	uint i;

	for (i = 0; i < synapsis.size(); i++)
		synapses_statistics[i] /= ((double) NUM_SAMPLES_FOR_AVERAGE * (double) NUM_SAMPLES * (double) NUM_ANNEALINGS_PER_SAMPLE);

	for (i = 0; i < neurons.size(); i++)
		bias_statistics[i] /= ((double) NUM_SAMPLES_FOR_AVERAGE * (double) NUM_SAMPLES * (double) NUM_ANNEALINGS_PER_SAMPLE);
}


// Optionally corrupt a clamped visible value with noise. Noise is
// currently DISABLED: the input is returned unchanged. (The disabled
// variant flipped 0 -> 1 with 5% probability and 1 -> 0 with 15%.)
double
get_noise_value(double visible_unit_input)
{
	double noise_value = visible_unit_input;

	return (noise_value);
}


void
set_visible_units(int input_sample)
{
	uint i, j;

	for (i = j = 0; i < neurons.size(); i++)
	{
		if (neurons[i].is_visible)
		{
			// OBS: assumes that data dimension is equal to the number of visible neurons
			// OBS: add the data in the same order that the visible neurons show up
			neurons[i].value = get_noise_value(data[input_sample][j]);
			j++;
		}
	}
}


void
positive_phase()
{
	int a;
	uint i;
	double T;
	int input_sample;
	
	init_statistics(positive_synapses_statistics, positive_bias_statistics);

	for (a = 0; a < NUM_ANNEALINGS_PER_SAMPLE; a++)
	{
		for (input_sample = 0; input_sample < NUM_SAMPLES; input_sample++)
		{
			randomize_neurons();
			set_visible_units(input_sample);

			for (T = INITIAL_TEMP; T >= FINAL_TEMP; T = T * TEMP_FACTOR)
				run_hidden_net(T);

			for (i = 0; i < NUM_SAMPLES_FOR_AVERAGE; i++)
			{
				run_hidden_net(FINAL_TEMP);
				update_synapses_statistics(positive_synapses_statistics);
				update_neurons_statistics(positive_bias_statistics);
			}
		}
	}

	sumarize_statistics(positive_synapses_statistics, positive_bias_statistics);
}


void
negative_phase()
{
	int a;
	uint i;
	double T;
	
	init_statistics(negative_synapses_statistics, negative_bias_statistics);
	
	for (a = 0; a < NUM_ANNEALINGS_PER_SAMPLE * NUM_SAMPLES; a++)
	{
		randomize_neurons();

		for (T = INITIAL_TEMP; T >= FINAL_TEMP; T = T * TEMP_FACTOR)
			run_full_net(T);

		for (i = 0; i < NUM_SAMPLES_FOR_AVERAGE; i++)
		{
			run_full_net(FINAL_TEMP);
			update_synapses_statistics(negative_synapses_statistics);
			update_neurons_statistics(negative_bias_statistics);
		}
	}

	sumarize_statistics(negative_synapses_statistics, negative_bias_statistics);
}


// Move each weight one fixed WEIGHT_STEP in the direction of the gap
// between positive- and negative-phase statistics (a sign update, not a
// magnitude-scaled gradient step). Current weights are traced to stderr.
void
synapse_update()
{
	uint s;
	double delta;

	for (s = 0; s < synapsis.size(); s++)
	{
		delta = positive_synapses_statistics[s] - negative_synapses_statistics[s];

		if (fabs(delta) != 0.0)
			synapsis[s].weight += WEIGHT_STEP * delta / fabs(delta);

		fprintf(stderr, "%lf ", synapsis[s].weight);
	}

}


// Move each bias one fixed WEIGHT_STEP in the direction of the gap
// between positive- and negative-phase activation statistics. Current
// biases are traced to stderr, terminated by a newline.
void
bias_update()
{
	uint n;
	double delta;

	for (n = 0; n < neurons.size(); n++)
	{
		delta = positive_bias_statistics[n] - negative_bias_statistics[n];

		if (fabs(delta) != 0.0)
			neurons[n].bias += WEIGHT_STEP * delta / fabs(delta);

		fprintf(stderr, "%lf ", neurons[n].bias);
	}

	fprintf(stderr, "\n");
}


int
convergence_test()
{
	uint i;
	double error = 0.0;

	for (i = 0; i < synapsis.size(); i++)
		error += (positive_synapses_statistics[i] - negative_synapses_statistics[i]) * (positive_synapses_statistics[i] - negative_synapses_statistics[i]);

	for (i = 0; i < synapsis.size(); i++)
		error += (positive_bias_statistics[i] - negative_bias_statistics[i]) * (positive_bias_statistics[i] - negative_bias_statistics[i]);

	error = sqrt(error);
	//fprintf(stderr, "%lf\n", error);

	if (error < ACCEPTABLE_ERROR)
		return (1);
	else
		return (0);
}


// Alternate positive/negative phases and sign-updates until the two
// statistics agree (convergence) or an iteration cap proportional to
// the network size is hit. Prints progress every 50 iterations.
void
train()
{
	int iter;
	int converged = 0;
	const int iter_limit = 200 * (int) neurons.size();

	for (iter = 0; !converged && iter < iter_limit; iter++)
	{
		if ((iter % 50) == 0)
			printf("iteraction: %d\n", iter);

		positive_phase();
		negative_phase();
		synapse_update();
		bias_update();
		converged = convergence_test();
	}

	printf("converged = %d\n\n", converged);
}


// Sample 20 equilibrium states of the trained, free-running network:
// each trial starts from a random state, anneals down to FINAL_TEMP,
// draws one more sweep and prints the resulting neuron activations.
void
run()
{
	int trial;
	double T;

	for (trial = 0; trial < 20; trial++)
	{
		randomize_neurons();

		for (T = INITIAL_TEMP; T >= FINAL_TEMP; T *= TEMP_FACTOR)
			run_full_net(T);

		run_full_net(FINAL_TEMP);
		print_neurons();
	}
}


// Entry point: build and train the Boltzmann machine on the hard-coded
// dataset, sample the trained network, then dump the learned weights
// and biases.
int
main()
{
	// Seed the PRNG so every run starts from different random states.
	srand(time(NULL));
	
	build_network();
	read_data();
	train();
	run();
	printf("\n");
	print_synapses();
	printf("\n");
	print_biases();
	
	return (0);
}
