#include "neuralnetwork.h"

#include "neuron.h"
#include "writer.h"

#include <math.h>
#include <stdio.h>
#include <string.h>

// Builds the network topology and zeroes all data/result buffers.
//
// layerCount:     stored but not otherwise used in this translation unit.
// neuronCount:    number of hidden-layer neurons.
// classCount:     number of output neurons (one per class).
// attributeCount: number of numeric input attributes per sample.
// nrIterations:   forwarded to the Writer (presumably for reporting;
//                 verify against writer.h).
//
// NOTE(review): all arrays below are raw new[] allocations with no visible
// matching delete[] (no destructor in this file) — confirm cleanup in the
// header or accept the leak for this one-shot program.
NeuralNetwork::NeuralNetwork(int layerCount,int neuronCount,int classCount,int attributeCount,int nrIterations) {
	
	this->layerCount = layerCount;
	this->neuronCount = neuronCount;
	this->classCount = classCount;
	this->attributeCount = attributeCount;
	this->nrIterations = nrIterations;

	this->writer = Writer(nrIterations);

	// Per-attribute range trackers, filled by readInput().
	max = new double[attributeCount];
	min = new double[attributeCount];

	// Forward-pass activations and backprop deltas.
	h = new double[neuronCount];
	y = new double[classCount];
	deltaY = new double[classCount];
	deltaH = new double[neuronCount];
	// One one-hot prediction row per sample; 150 = Iris data-set size.
	result = new int*[150];
	for(int i = 0; i < 150; i++) {
		result[i] = new int[classCount]; 
	}

	hiddenLayer = new Neuron[neuronCount];
	outputLayer = new Neuron[classCount];

	// Each hidden neuron has one weight per input attribute.
	for (int i = 0 ; i < neuronCount; i++) {
		//hiddenLayer[i] = Neuron();
		hiddenLayer[i].init(attributeCount);
	}

	// Each output neuron has one weight per hidden neuron.
	for (int i = 0;  i < classCount; i++) {
		//outputLayer[i] = Neuron();
		outputLayer[i].init(neuronCount);
	}

	initializeSets();
	learningRate = 0.05;
}

// Zeroes every row of the data/train/test matrices (4 attributes plus
// 3 one-hot class columns = 7) and the per-sample result rows.
void NeuralNetwork::initializeSets() {
	for (int row = 0; row < 150; row++) {
		for (int col = 0; col < 7; col++) {
			dataSet[row][col] = 0;
			trainSet[row][col] = 0;
			testSet[row][col] = 0;
		}
		for (int c = 0; c < classCount; c++) {
			result[row][c] = 0;
		}
	}
}

// Copies one full sample row (4 attributes + 3 class flags) from the
// data set into the training set.
void NeuralNetwork::copyRowToTrainSet(int from,int to) {
	int col = 0;
	while (col < 7) {
		trainSet[to][col] = dataSet[from][col];
		++col;
	}
}

// Copies one full sample row (4 attributes + 3 class flags) from the
// data set into the test set.
void NeuralNetwork::copyRowToTestSet(int from,int to) {
	int col = 0;
	while (col < 7) {
		testSet[to][col] = dataSet[from][col];
		++col;
	}
}

// Loads the 150-sample Iris data set from a whitespace-separated file:
// four numeric attributes followed by the species name, which is one-hot
// encoded into columns 4..6. Also tracks the per-attribute min/max used
// later by normalize().
void NeuralNetwork::readInput(char *fileName) {

	char type[50];
	FILE *f = fopen(fileName,"r");
	// BUGFIX: the old code dereferenced f unconditionally; a missing or
	// unreadable file would crash instead of leaving the data set zeroed.
	if (f == NULL) {
		return;
	}

	// Sentinels outside the attributes' plausible range.
	min[0] = min[1] = min[2] = min[3] = 1000;
	max[0] = max[1] = max[2] = max[3] = -1;

	for (int i = 0; i < 150; i++) {
	
		for (int j = 0; j < 7; j++) {
			dataSet[i][j] = 0;
		}

		fscanf(f,"%lf",&dataSet[i][0]);
		fscanf(f,"%lf",&dataSet[i][1]);
		fscanf(f,"%lf",&dataSet[i][2]);
		fscanf(f,"%lf",&dataSet[i][3]);

		// Running min/max per attribute for normalization.
		for (int j = 0; j < 4 ; j++) {
			max[j] = (max[j] < dataSet[i][j])? dataSet[i][j] : max[j];
			min[j] = (min[j] > dataSet[i][j])? dataSet[i][j] : min[j];
		}

		// BUGFIX: %s requires a char* — '&type' was a char(*)[50] (format
		// mismatch / UB); %49s also bounds the read to the buffer size.
		fscanf(f,"%49s",type);
		if (strcmp(type,"Iris-setosa") == 0) {
			dataSet[i][4] = 1;
		} else if (strcmp(type,"Iris-versicolor") == 0) {
			dataSet[i][5] = 1;
		} else if (strcmp(type,"Iris-virginica") == 0) {
			dataSet[i][6] = 1;
		}
	}

	fclose(f);
}

// Splits the 150 samples deterministically: every third row (i % 3 == 0)
// becomes a test sample (50 rows), the rest become training samples
// (100 rows). Both destinations are filled contiguously from index 0.
void NeuralNetwork::splitData() {

	int trainRow = 0;
	int testRow = 0;

	for (int i = 0; i < 150 ; i++) {
		if (i % 3 != 0) {
			this->copyRowToTrainSet(i, trainRow++);
		} else {
			this->copyRowToTestSet(i, testRow++);
		}
	}
}

// Min-max normalizes the attribute columns of the data set, using the
// per-attribute min/max recorded by readInput().
//
// The formula ((x - min)/(max - min) + 0.02) * 0.98 maps the raw range
// onto [0.0196, 0.9996] — keeping values strictly inside (0, 1), away
// from the sigmoid's saturated endpoints.
// NOTE(review): the more common form is scaled*0.98 + 0.02 (range
// [0.02, 1.0]); confirm whether the current operator order is intended.
void NeuralNetwork::normalize() {
	
	double adder = 0.02;
	double multiplier = 0.98;

	for (int i = 0; i < 150 ; i++) {
		for (int j = 0; j < attributeCount; j++) {
			dataSet[i][j] = ((dataSet[i][j] - min[j])/(max[j] - min[j]) + adder) * multiplier ;
		}
	}
}

// One training epoch: a forward pass, error computation, and weight
// update for each of the 100 training rows (see splitData()). Error
// statistics are buffered in the writer across the epoch.
void NeuralNetwork::trainNetwork() {

	writer.startBuffer();

	for (int sample = 0; sample < 100; ++sample) {
		computeOutput(sample);
		computeErrors(sample);
		updateWeights(sample);
	}

	writer.flushBuffer();
}

// Classifies all 50 test rows and tallies good/bad results in the writer.
// computeOutput() stores test predictions at global indices 100..149,
// so test row i maps to result[i + 100].
void NeuralNetwork::testNetwork() {
	for (int k = 100; k < 150; k++) {
		computeOutput(k);
	}

	writer.goodResults = 0;
	writer.badResults = 0;

	for (int row = 0; row < 50; row++) {

		// Correct only if the one-hot prediction matches the expected
		// class flags in every output column.
		bool mismatch = false;
		for (int c = 0; c < classCount; c++) {
			if (testSet[row][c + 4] != result[row + 100][c]) {
				mismatch = true;
			}
		}

		if (mismatch) {
			writer.badResults++;
		} else {
			writer.goodResults++;
		}
	}
}

// Forward pass for sample k. Indices below 100 read from the training
// set; indices 100..149 read from the test set (offset by 100). Fills
// h[] and y[], then one-hot encodes the highest-scoring class into
// result[k].
void NeuralNetwork::computeOutput(int k) {

	for (int j = 0; j < neuronCount; j++) {
		h[j] = (k < 100) ? hiddenLayer[j].getOutput(trainSet[k])
		                 : hiddenLayer[j].getOutput(testSet[k - 100]);
	}

	// Argmax over the output-layer activations.
	int best = -1;
	double bestValue = -10;
	for (int j = 0; j < classCount; j++) {
		y[j] = outputLayer[j].getOutput(h);
		if (y[j] > bestValue) {
			bestValue = y[j];
			best = j;
		}
	}

	for (int c = 0; c < classCount; c++) {
		result[k][c] = (c == best) ? 1 : 0;
	}
}

// Computes backpropagation deltas for training sample k. Expects a prior
// computeOutput(k) so y[] (output activations) and h[] (hidden
// activations) are current. Also accumulates the sample's output error
// into the writer's buffers.
void NeuralNetwork::computeErrors(int k) {	

	double euclidianDistance = 0;

	for (int i = 0; i < classCount; i++) {
		// Sigmoid derivative y(1-y) times the output error (target - y).
		deltaY[i] = y[i] * (1 - y[i]) * (trainSet[k][4 + i] - y[i]);
		// BUGFIX: accumulate the squared error over ALL classes; the old
		// code assigned instead of adding, so only the last class's error
		// survived the loop. (x*x replaces pow(x,2) — same value, cheaper.)
		double diff = y[i] - trainSet[k][4 + i];
		euclidianDistance += diff * diff;
	}

	// Hidden deltas: back-propagated output deltas weighted by the
	// output-layer weights, times the hidden sigmoid derivative.
	for (int i = 0; i < neuronCount; i++) {
		double aux = 0;
		for (int j = 0; j < classCount; j++){
			aux += deltaY[j] * outputLayer[j].weights[i];
		}
		deltaH[i] = h[i] * (1 - h[i]) * aux;
	}

	// NOTE(review): absolute and relative buffers receive the identical
	// value here — verify against Writer whether the relative error was
	// meant to be scaled differently.
	writer.absoluteErrorBuffer += sqrt(euclidianDistance);
	writer.relativeErrorBuffer += sqrt(euclidianDistance);
}	

// Applies the gradient step for training sample k using the deltas from
// computeErrors(k): output weights move by rate * deltaY[j] * h[i],
// hidden weights by rate * deltaH[j] * attribute value.
void NeuralNetwork::updateWeights(int k) {
	for (int j = 0; j < classCount; j++) {
		for (int i = 0; i < neuronCount; i++) {
			outputLayer[j].weights[i] += learningRate * deltaY[j] * h[i];
		}
	}
	for (int j = 0; j < neuronCount; j++) {
		for (int a = 0; a < attributeCount; a++) {
			hiddenLayer[j].weights[a] += learningRate * deltaH[j] * trainSet[k][a];
		}
	}
}
