#include "network.h"




///// LAYER CLASS /////

// constructor / destructor //
// Build a layer owning `neurons_Number` freshly allocated neurons.
Layer::Layer(int neurons_Number){

	this->neurons_Number = neurons_Number;

	int created = 0;
	while(created < neurons_Number){
		neurons.push_back(new Neuron());
		created++;
	}
}

// Release every neuron owned by this layer.
Layer::~Layer(){

	int i = neurons_Number;
	while(i-- > 0){
		delete neurons[i];
	}
}

// Get things //

int Layer::get_Neurons_Number(){
	return this->neurons_Number;
}

// Bounds-checked access: returns the i-th neuron, or a null
// pointer when the index is outside [0, neurons_Number).
Neuron *Layer::get_Neuron(int i){
	if (0 <= i && i < get_Neurons_Number()){
		return neurons[i];
	}
	return nullptr;
}
///// NETWORK CLASS /////

// constructor / destructor //

// Build `layers_Number` layers; neurons_Per_Layer[l] gives the
// neuron count of layer l. Links are wired later by init().
Network::Network(int layers_Number, int *neurons_Per_Layer){

	this->layers_Number = layers_Number;

	int l = 0;
	while(l < layers_Number){
		layers.push_back(new Layer(neurons_Per_Layer[l]));
		l++;
	}
}

// Release every layer owned by this network.
Network::~Network(){

	int l = layers_Number;
	while(l-- > 0){
		delete layers[l];
	}
}

// get things //
int Network::get_Layers_Number(){
	return layers_Number;
}

// init random weights //
void Network::randomize(){
	
	for(int l = 1; l < get_Layers_Number(); l++){
		for(int n = 0; n < layers[l]->get_Neurons_Number(); n++){
			for(int i = 0; i < layers[l]->neurons[n]->get_Input_Number(); i++){
				layers[l]->neurons[n]->input[i]->w = 1 - (double(1.0*rand()/((double)RAND_MAX + 1)));					
			}
		}
	}
}

// init links //

// Wire the network and select the activation function `func` on
// every neuron: each input-layer neuron receives a single direct
// input (weight fixed to 1, so run() can load raw values into it),
// and each subsequent layer is fully connected to the layer before
// it. Fix: guards against an empty network — the original
// unconditionally dereferenced layers[0].
// NOTE(review): calling init() twice would add duplicate inputs;
// it is expected to run exactly once, right after construction.
void Network::init(int func){

	if(layers_Number <= 0){
		return;	// empty network: nothing to wire
	}

	// input layer: one pass-through link per neuron //
	Layer *cur = layers[0];
	for(int n = 0; n < cur->get_Neurons_Number(); n++){
		Neuron *pntr = cur->neurons[n];
		pntr->function = func;
		pntr->add_Input();

		pntr->input[0]->w = 1;	// identity weight for the raw input value
	}

	// every later layer: fully connect to the previous layer //
	for(int l = 1; l < layers_Number; l++){
		Layer *prev = layers[l - 1];
		cur = layers[l];

		for(int n = 0; n < cur->get_Neurons_Number(); n++){
			Neuron *pntr = cur->neurons[n];
			pntr->function = func;

			for(int m = 0; m < prev->get_Neurons_Number(); m++){
				pntr->add_Input(prev->get_Neuron(m));
			}
		}
	}
}

// Backpropagation algorithm //

// One backpropagation step against the target vector
// desired_Out_Vect (length = output-layer neuron count):
// compute each neuron's error term (delta) from the output layer
// backwards, then update every trainable weight with momentum:
//   dw = lambda * inVal * delta + alpha * prev_dw
void Network::backprop(const double *desired_Out_Vect){
	double delta, dw, out_Val;
	// output-layer error: delta = f'(out) * (target - out) //
	for(int n = 0; n < layers[layers_Number - 1]->get_Neurons_Number(); n++){
		out_Val = layers[layers_Number - 1]->neurons[n]->out_Val;
		layers[layers_Number - 1]->neurons[n]->delta = layers[layers_Number - 1]->neurons[n]->d_Func(out_Val) * (desired_Out_Vect[n] - out_Val);
		// sigmoid-specific variant kept for reference:
		// layers[layers_Number - 1]->neurons[n]->delta = out_Val * (1 - out_Val) * (desired_Out_Vect[n] - out_Val);
	}

	// hidden-layer deltas, walking backwards; layer 0 (input) has no trainable error term //
	// delta = f'(outVal) * sum over outgoing links of (w * delta of the downstream neuron) //
	// NOTE(review): output[i]->in_Neuron presumably names the neuron that
	// consumes this output (the next layer's neuron) — confirm in network.h.
	for(int l = layers_Number -2; l > 0; l--){
		for(int n = 0; n < layers[l]->get_Neurons_Number(); n++){
			delta = 0;
			for(int i = 0; i < layers[l]->neurons[n]->get_Output_Number(); i++){
				delta += layers[l]->neurons[n]->output[i]->w * layers[l]->neurons[n]->output[i]->in_Neuron->delta;
			}
			out_Val = layers[l]->neurons[n]->out_Val;
			layers[l]->neurons[n]->delta = layers[l]->neurons[n]->d_Func(out_Val) * delta;
		}
	}

	// correct the weights; starts at l = 1 so the input layer's fixed pass-through weights are untouched //
	for(int l = 1; l < layers_Number; l++){
		for(int n = 0; n < layers[l]->get_Neurons_Number(); n++){
			for(int i = 0; i < layers[l]->neurons[n]->get_Input_Number(); i++){
				// dw = lambda * inval * delta + alpha * prev_dw (momentum term)
				dw = lambda * layers[l]->neurons[n]->input[i]->inVal * layers[l]->neurons[n]->delta;
				dw += alpha * layers[l]->neurons[n]->input[i]->prev_dw;
				layers[l]->neurons[n]->input[i]->prev_dw = dw;
				// correct the weight //
				layers[l]->neurons[n]->input[i]->w += dw;
			}
		}
	}

}

// returns true if the network has been trained this time; false if it is already within tolerance //
bool Network::train(const double *in_Vect, double *out_Vect, const double *desired_Out_Vect, double error){
	
	double Error = 0;

	// run network with in_Vect and out_Vect //
	run(in_Vect, out_Vect);
	
	for(int n = 0; n < layers[layers_Number - 1]->get_Neurons_Number(); n++){
		Error = fabs(out_Vect[n] - desired_Out_Vect[n]);
		if( Error > error)
			break;

	}
	// if at least one of the nodes as a too big error, backprop //
	if(Error > error){
		backprop(desired_Out_Vect);
		return true;
	}else{
		return false;
	}
}

void Network::run(const double *in_Vect, double *out_Vect){
	
	// input layer loading //
	for(int n = 0; n < layers[0]->get_Neurons_Number(); n++){
		layers[0]->neurons[n]->input[0]->inVal = in_Vect[n];
		layers[0]->neurons[n]->fire();
	}

	// other layers processing //
	for(int l = 1; l < get_Layers_Number(); l++){
		for(int n = 0; n < layers[l]->get_Neurons_Number(); n++){
			layers[l]->neurons[n]->fire();
		}
	}

	// set output //
	output(out_Vect);
}	

void Network::output(double *out_Vect){
	
	for(int n = 0; n < layers[layers_Number - 1]->get_Neurons_Number(); n++){
		out_Vect[n] = layers[layers_Number - 1]->neurons[n]->out_Val;
	}
}

// Set the momentum coefficient. Accepts values in [0, 1);
// returns 0 on success, 1 when the value is rejected.
int Network::set_Alpha(double alpha){
	if(!(0 <= alpha && alpha < 1)){
		return 1;
	}
	this->alpha = alpha;
	return 0;
}

// Set the learning rate. Accepts values in [0, 1);
// returns 0 on success, 1 when the value is rejected.
int Network::set_Lambda(double lambda){
	if(!(0 <= lambda && lambda < 1)){
		return 1;
	}
	this->lambda = lambda;
	return 0;
}
	








