/*
 * jjann.c
 *
 *  Created on: Sep 4, 2013
 *      Author: Alan
 */

#include "jjann.h"

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "types.h"

void ffInputLayer(netdata_t* net, float32* inputs);
void ffHiddenLayer(netdata_t* net, uint32 layer);
void ffOutputLayer(netdata_t* net, float32* results);

/**
 * FEED FORWARD
 */
/**
 * Runs one full forward pass over the scalar (per-neuron) network:
 * loads the inputs into layer 0, propagates through each hidden layer
 * in order, then evaluates the output layer into the caller's buffer.
 *
 * @param net     network to evaluate (neuron z/a fields are mutated)
 * @param inputs  one value per neuron in layer 0
 * @param outputs receives one value per neuron in the last layer
 */
void feedForward(netdata_t* net, float32* inputs, float32* outputs) {
	uint32 layer;

	ffInputLayer(net, inputs);

	/* Hidden layers: everything strictly between the first and last layer. */
	for (layer = 1; layer < net->layersize - 1; layer++) {
		ffHiddenLayer(net, layer);
	}

	ffOutputLayer(net, outputs);
}

/**
 * Vectorized forward pass: for each layer l computes
 *   a[l] = w[l-1] * z[l-1] + b[l-1]
 *   z[l] = af[l](a[l])
 * and finally copies the last layer's activations into `outputs`.
 *
 * Each per-layer matrix in net->a / net->z is released (freeData + free)
 * and replaced with a freshly allocated result on every call.
 *
 * @param net     vectorized network; a[], z[] buffers are replaced in place
 * @param inputs  one value per neuron in layer 0
 * @param outputs receives z of the last layer (rows * sizeof(float32) bytes)
 */
void feedForwardV(vectorizedNet_tp net, float32* inputs, float32* outputs) {
	uint32 l;
	float32 *activated;
	matrix_tp inputVec;
	matrix_tp preact;
	matrix_tp lastZ;

	/* Wrap the raw input array in a column vector. */
	inputVec = newMatrix(neuronsInLayer(net, 0), 1);
	setColumn(inputVec, 0, inputs, 0);

	/* Layer 0 applies no activation: both a[0] and z[0] are the inputs. */
	freeData(net->a[0]);
	free(net->a[0]);
	net->a[0] = copyMatrix(inputVec);
	freeData(net->z[0]);
	free(net->z[0]);
	net->z[0] = copyMatrix(inputVec);
	freeData(inputVec);
	free(inputVec);

	for (l = 1; l < net->numLayers; l++) {
		/* a[l] = w[l-1] * z[l-1] + b[l-1] */
		preact = multiply(net->w[l - 1], net->z[l - 1]);
		freeData(net->a[l]);
		free(net->a[l]);
		net->a[l] = add(preact, net->b[l - 1]);

		/* z[l] = af[l](a[l]); af returns a heap array we own and must free. */
		freeData(net->z[l]);
		free(net->z[l]);
		net->z[l] = newMatrix(net->a[l]->rows, 1);
		activated = net->af[l](net, net->a[l]->data, net->a[l]->rows);
		setColumn(net->z[l], 0, activated, 0);

		freeData(preact);
		free(preact);
		free(activated);
	}

	/* Export the final layer's activations to the caller's buffer. */
	lastZ = net->z[net->numLayers - 1];
	memcpy(outputs, lastZ->data, lastZ->rows * sizeof(float32));
}
/**
 * Loads the raw input vector into layer 0: each input neuron's output
 * (z) is set directly from the corresponding array element — input
 * neurons apply no weights and no activation function.
 *
 * @param net    network whose layer-0 neurons are written
 * @param inputs one value per neuron in layer 0
 */
void ffInputLayer(netdata_t* net, float32* inputs) {
	uint32 idx;
	neuron_t* cur;

	for (idx = 0; idx < net->neurons[0]; idx++) {
		cur = getNeuron(net, 0, idx);
		cur->z = inputs[idx];
	}
}

/**
 * Propagates activations through one hidden layer: each neuron sums
 * src->weights[n] * src->z over its input connections (the extra
 * iteration covers the bias connection), then applies its activation
 * function. Neurons with no inputs get a = 0 and keep their old z.
 *
 * NOTE(review): indexing the source neuron's outgoing-weight array by
 * this neuron's position (src->weights[n]) is only correct for fully
 * connected layers — the original TODO flagged the same issue.
 *
 * @param net   network being evaluated
 * @param layer index of the hidden layer to compute
 */
void ffHiddenLayer(netdata_t* net, uint32 layer) {
	uint32 n, in;
	neuron_tp cur, src;

	for (n = 0; n < net->neurons[layer]; n++) {
		cur = getNeuron(net, layer, n);
		cur->a = .0f;
		if (cur->n_input == 0) {
			continue; /* no inputs: z is left untouched */
		}
		/* n_input + 1 iterations: the last one is the bias connection. */
		for (in = 0; in < cur->n_input + 1; in++) {
			src = cur->inputs[in];
			cur->a += src->weights[n] * src->z;
		}
		cur->z = cur->activationFunction(net, cur->a);
	}
}

/**
 * Computes the output (last) layer and copies its activations into
 * the caller-supplied results buffer.
 *
 * Accumulation and activation run as two separate passes because the
 * activation function may be a softmax, which needs every neuron's
 * pre-activation (a) available before any z can be computed.
 *
 * @param net     network being evaluated
 * @param results receives one value per neuron in the last layer
 */
void ffOutputLayer(netdata_t* net, float32* results) {
	uint32 n, in;
	uint32 last = net->layersize - 1;
	neuron_tp cur, src;

	/* Pass 1: weighted sums (the + 1 iteration is the bias connection). */
	for (n = 0; n < net->neurons[last]; n++) {
		cur = getNeuron(net, last, n);
		cur->a = .0f;
		for (in = 0; in < cur->n_input + 1; in++) {
			src = cur->inputs[in];
			cur->a += src->weights[n] * src->z;
		}
	}

	/* Pass 2: activations (deferred so softmax sees all pre-activations). */
	for (n = 0; n < net->neurons[last]; n++) {
		cur = getNeuron(net, last, n);
		cur->z = cur->activationFunction(net, cur->a);
	}

	/* Pass 3: export to the caller's buffer. */
	for (n = 0; n < net->neurons[last]; n++) {
		results[n] = getNeuron(net, last, n)->z;
	}
}

