#include <assert.h>  // assert() used in evaluate(); was only available transitively
#include <float.h>   // FLT_EPSILON; was only available transitively via OpenCV headers
#include <math.h>    // fabs
#include <stdio.h>
#include <stdlib.h>
#include <time.h>    // time(), used to seed rand(); was only available transitively

#include <fstream>
#include <iostream>
#include <string>

#include <opencv2\opencv.hpp>

#include "ArtificialNeuralNetworks.h"
#include "DTree.h"
#include "KNN.h"
#include "NaiveBayes.h"
#include "SupportVectorMachine.h"

using namespace std;
using namespace cv;


// ---------------------------------------------------------------------------
// Forward declarations
// ---------------------------------------------------------------------------

// File I/O: read whitespace-separated features/labels from a text file; the
// first trainData.rows rows fill the train matrix, the rest the test matrix.
void readFeatures(string filePath, Mat& trainData, Mat& testData);
void readLabels(string filePath, Mat& trainDataLabels, Mat& testDataLabels);
void printMatrix(Mat&);

// Evaluation helpers: plain accuracy, k-fold cross-validation producing a
// 2x2 confusion matrix, and metrics derived from that matrix.
float evaluate(Mat& predicted, Mat& actual);
Mat kfold(Mat& data, Mat& labels, CvSVMParams& params, const int k);
float accuracy (Mat& confusion);
float precision (Mat& confusion);
float sensibility (Mat& confusion);
float specificity (Mat& confusion);
float youden (Mat& confusion);
float ROC (Mat& confusion);

// Experiment drivers, one per classifier.
// (A duplicate declaration of test3svm() was removed.)
void test1svm();
void test2svm();
void test3svm();
void test4ann();
void test5bayes();
void test6dtree();
void test7knn();

int main (int argc, char** argv) 
{
	// Entry point: each testN*() routine runs one classifier experiment over
	// the feature files under Features\.  Exactly one is enabled at a time;
	// the others stay commented out so experiments can be switched by hand.
	//test1svm();
	//test2svm();
	test3svm();
	//test4ann();
	//test5bayes();
	//test6dtree();
	//test7knn();

	// Keeps the console window open on Windows (harmless elsewhere).
	system("pause");
	return 0;
}

void test2svm()
{
	// SVM experiment: train an RBF C-SVC on ALL 51 samples (PERCENT_TEST_DATA
	// is 0, so the held-out matrices are empty) and evaluate with 17-fold
	// cross-validation, printing the confusion matrix and derived metrics.
	// (Removed ~90 lines of dead commented-out testing code, two misleading
	// "TEST DATA" debug prints, and a prediction loop whose result was
	// never used.)
	const int NUM_ATTRIBUTES = 39;
	const int NUM_TOTAL_DATA = 51;
	const float PERCENT_TEST_DATA = 0;

	// Truncating float->int split; with 0% this yields 0 test rows.
	int NUM_TEST_DATA = NUM_TOTAL_DATA*(PERCENT_TEST_DATA/100.f);
	int NUM_TRAIN_DATA = NUM_TOTAL_DATA - NUM_TEST_DATA;
	Mat_<float> trainData(NUM_TRAIN_DATA, NUM_ATTRIBUTES);
	Mat_<float> testData(NUM_TEST_DATA, NUM_ATTRIBUTES);
	Mat_<float> trainDataLabels(NUM_TRAIN_DATA,1);
	Mat_<float> testDataLabels(NUM_TEST_DATA, 1);

	readFeatures("Features\\Sub_features_N.txt", trainData, testData);
	readLabels("Features\\Sub_labels.txt",trainDataLabels, testDataLabels);

	// Starting point for train_auto's hyper-parameter grid search.
	CvSVMParams params = CvSVMParams();
	params.svm_type = CvSVM::C_SVC;
	params.kernel_type = CvSVM::RBF; // CvSVM::RBF, CvSVM::LINEAR ...
	params.degree = 0;               // for poly
	params.gamma = 20;               // for poly/rbf/sigmoid
	params.coef0 = 0;                // for poly/sigmoid
	params.C = 7;                    // for C_SVC, EPS_SVR and NU_SVR
	params.nu = 0.0;                 // for NU_SVC, ONE_CLASS, and NU_SVR
	params.p = 0.0;                  // for EPS_SVR
	params.class_weights = NULL;     // for C_SVC
	params.term_crit.type = CV_TERMCRIT_ITER + CV_TERMCRIT_EPS;
	params.term_crit.max_iter = 1000;
	params.term_crit.epsilon = 1e-6;

	// Grid-search the hyper-parameters with 18-fold internal CV.
	CvSVM svm;
	svm.train_auto(trainData, trainDataLabels, Mat(), Mat(), params, 18);

	// Cross-validate with the tuned parameters and report every metric.
	Mat confusion = kfold(trainData, trainDataLabels, svm.get_params(), 17);
	cout << confusion << endl;
	cout << "Accuracy: " << accuracy(confusion) << endl;
	cout << "precision: " <<precision(confusion) << endl;
	cout << "sensibility: " <<sensibility(confusion) << endl;
	cout << "specificity: " << specificity(confusion) << endl;
	cout << "Youden: " << youden(confusion) << endl;
	cout << "ROC: " << ROC(confusion) << endl;
}

void test1svm()
{
	// SVM experiment with an 80/20 train/test split: RBF C-SVC hyper-
	// parameters are grid-searched by train_auto (2-fold internal CV), then
	// plain accuracy is reported on the held-out rows.
	// (Removed ~70 lines of dead commented-out testing code and two stray
	// "TEST DATA" debug prints.)
	const int NUM_ATTRIBUTES = 39;
	const int NUM_TOTAL_DATA = 51;
	const float PERCENT_TEST_DATA = 20;

	// Truncating float->int split: 51 * 0.2 -> 10 test rows, 41 train rows.
	int NUM_TEST_DATA = NUM_TOTAL_DATA*(PERCENT_TEST_DATA/100.f);
	int NUM_TRAIN_DATA = NUM_TOTAL_DATA - NUM_TEST_DATA;
	Mat_<float> trainData(NUM_TRAIN_DATA, NUM_ATTRIBUTES);
	Mat_<float> testData(NUM_TEST_DATA, NUM_ATTRIBUTES);
	Mat_<float> trainDataLabels(NUM_TRAIN_DATA,1);
	Mat_<float> testDataLabels(NUM_TEST_DATA, 1);

	readFeatures("Features\\Sub_features_N_mod.txt", trainData, testData);
	readLabels("Features\\Sub_labels_mod.txt",trainDataLabels, testDataLabels);

	// Starting point for train_auto's hyper-parameter grid search.
	CvSVMParams params = CvSVMParams();
	params.svm_type = CvSVM::C_SVC;
	params.kernel_type = CvSVM::RBF; // CvSVM::RBF, CvSVM::LINEAR ...
	params.degree = 0;               // for poly
	params.gamma = 20;               // for poly/rbf/sigmoid
	params.coef0 = 0;                // for poly/sigmoid
	params.C = 7;                    // for C_SVC, EPS_SVR and NU_SVR
	params.nu = 0.0;                 // for NU_SVC, ONE_CLASS, and NU_SVR
	params.p = 0.0;                  // for EPS_SVR
	params.class_weights = NULL;     // for C_SVC
	params.term_crit.type = CV_TERMCRIT_ITER + CV_TERMCRIT_EPS;
	params.term_crit.max_iter = 1000;
	params.term_crit.epsilon = 1e-6;

	CvSVM svm;
	svm.train_auto(trainData, trainDataLabels, Mat(), Mat(), params, 2);

	// Predict every held-out sample and report the fraction classified
	// correctly.
	Mat_<float> predicted(testDataLabels.rows, 1);
	for(int i = 0; i < testData.rows; i++) {
		Mat sample = testData.row(i);
		predicted.at<float>(i, 0) = svm.predict(sample);
	}

	cout << "Accuracy_{SVM} = " << evaluate(predicted, testDataLabels) << endl;
}

void readFeatures(string filePath, Mat& trainData, Mat& testData)
{
	// Reads (trainData.rows + testData.rows) x trainData.cols whitespace-
	// separated floats from filePath.  The first trainData.rows rows fill
	// trainData, the remainder fill testData.  Both matrices must be CV_32F
	// and share the same column count.
	ifstream in(filePath);
	if (!in.is_open())
	{
		// Previously a missing file failed silently, leaving the matrices
		// with whatever uninitialized contents they had.
		cerr << "readFeatures: could not open " << filePath << endl;
		return;
	}
	float value;
	for (int i = 0; i < trainData.rows + testData.rows; i++)
	{
		for (int j = 0; j < trainData.cols; j++)
		{
			// Detect a short/garbled file instead of silently looping on a
			// failed stream.
			if (!(in >> value))
			{
				cerr << "readFeatures: " << filePath << " ended early at row "
				     << i << ", col " << j << endl;
				return;
			}
			if (i < trainData.rows)
			{
				trainData.at<float>(i,j) = value;
			}
			else
			{
				testData.at<float>(i-trainData.rows,j) = value;
			}
		}
	}
}

void readLabels(string filePath, Mat& trainDataLabels, Mat& testDataLabels)
{
	// Reads one textual label per sample: "doente" (sick) -> 1,
	// "saudavel" (healthy) -> 0.  The first trainDataLabels.rows labels fill
	// the training vector, the rest the test vector (both CV_32F columns).
	ifstream in(filePath);
	if (!in.is_open())
	{
		cerr << "readLabels: could not open " << filePath << endl;
		return;
	}
	string value;
	for (int i = 0; i < trainDataLabels.rows + testDataLabels.rows; i++)
	{
		if (!(in >> value))
		{
			cerr << "readLabels: " << filePath << " ended early at row " << i << endl;
			return;
		}
		// BUG FIX: label was previously uninitialized, so an unrecognized
		// token silently reused the previous (or an indeterminate) value.
		int label = 0;
		if (value == "doente")
		{
			label = 1;
		}
		else if (value != "saudavel")
		{
			cerr << "readLabels: unknown label '" << value << "' at row " << i
			     << ", defaulting to 0" << endl;
		}
		if (i < trainDataLabels.rows)
		{
			trainDataLabels.at<float>(i,0) = (float) label;
		}
		else
		{
			testDataLabels.at<float>(i-trainDataLabels.rows,0) = (float) label;
		}
	}
}

float evaluate(Mat& predicted, Mat& actual) {
	// Fraction of rows where the predicted label matches the actual one.
	// Labels are stored as floats (0/1), so "equal" means within FLT_EPSILON.
	assert(predicted.rows == actual.rows);
	if (actual.rows == 0) {
		return 0.f;  // guard: previously returned 0/0 == NaN on an empty set
	}
	int correct = 0;
	for (int i = 0; i < actual.rows; i++) {
		float p = predicted.at<float>(i,0);
		float a = actual.at<float>(i,0);
		if (fabs(p - a) < FLT_EPSILON) {
			correct++;
		}
	}
	return (float) correct / (float) actual.rows;
}

Mat kfold(Mat& data, Mat& labels, CvSVMParams& params, const int k)
{
	// k-fold cross-validation of an SVM with the given parameters.
	// Returns a 2x2 CV_32S confusion matrix laid out rows = actual,
	// cols = predicted:
	//     (0,0)=TP  (0,1)=FN
	//     (1,0)=FP  (1,1)=TN
	// This is the layout accuracy()/precision()/sensibility()/specificity()
	// already read.  BUG FIX: the previous version stored false positives at
	// (0,1) and false negatives at (1,0) -- swapped relative to those
	// readers -- which inverted precision, sensibility and specificity.
	//
	// NOTE(review): when data.rows % k != 0 the trailing rows never appear in
	// any test fold (each fold tests exactly data.rows/k rows) -- confirm
	// this is acceptable.
	Mat confusion = Mat::zeros(2, 2, CV_32S);
	const int testNum = data.rows / k;
	int init = 0;
	int final = testNum;
	for (int fold = 0; fold < k; fold++)
	{
		// Rows [init, final) form the test fold; everything else trains.
		Mat_<float> test(testNum, data.cols);
		Mat_<float> train(data.rows - testNum, data.cols);
		Mat_<float> testLabels(testNum, 1);
		Mat_<float> trainLabels(data.rows - testNum, 1);
		int cntTrain = 0;
		int cntTest = 0;
		for (int n = 0; n < data.rows; n++)
		{
			// Labels are stored as floats but hold integral class ids (0/1).
			int tempLabel = (int) labels.at<float>(n,0);
			if (n >= init && n < final)
			{
				data.row(n).copyTo(test.row(cntTest));
				testLabels.at<float>(cntTest,0) = (float) tempLabel;
				cntTest++;
			}
			else
			{
				data.row(n).copyTo(train.row(cntTrain));
				trainLabels.at<float>(cntTrain,0) = (float) tempLabel;
				cntTrain++;
			}
		}
		init += testNum;
		final += testNum;

		// Train on the k-1 remaining folds, predict the held-out fold and
		// accumulate the outcomes into the confusion matrix.
		CvSVM svm;
		svm.train(train, trainLabels, Mat(), Mat(), params);
		for (int n = 0; n < test.rows; n++)
		{
			Mat sample = test.row(n);
			float p = svm.predict(sample);
			float a = testLabels.at<float>(n,0);
			bool correct = fabs(p - a) < FLT_EPSILON;
			if (correct && a == 1)
			{
				confusion.at<int>(0,0)++;   // true positive
			}
			else if (!correct && a == 1)
			{
				confusion.at<int>(0,1)++;   // false negative (was stored at (1,0))
			}
			else if (!correct && a == 0)
			{
				confusion.at<int>(1,0)++;   // false positive (was stored at (0,1))
			}
			else
			{
				confusion.at<int>(1,1)++;   // true negative
			}
		}
	}
	return confusion;
}

float accuracy (Mat& confusion)
{
	// Overall accuracy: (TP + TN) / (TP + TN + FP + FN).
	// Expects a 2x2 CV_32S matrix with correct classifications on the main
	// diagonal and errors off-diagonal; the result is symmetric in the two
	// off-diagonal cells, so it does not depend on the FP/FN placement.
	const int onDiagonal  = confusion.at<int>(0,0) + confusion.at<int>(1,1);
	const int offDiagonal = confusion.at<int>(0,1) + confusion.at<int>(1,0);
	return (float) onDiagonal / (float) (onDiagonal + offDiagonal);
}

float precision (Mat& confusion)
{
	// Precision: TP / (TP + FP).
	// Assumed layout (rows = actual, cols = predicted):
	//     (0,0)=TP  (0,1)=FN  (1,0)=FP  (1,1)=TN
	// (Removed the unused TN and FN locals.)
	int TP = confusion.at<int>(0,0);
	int FP = confusion.at<int>(1,0);
	return float(TP) / float(TP + FP);
}

float sensibility (Mat& confusion)
{
	// Sensitivity (recall, true-positive rate): TP / (TP + FN).
	// Assumed layout (rows = actual, cols = predicted):
	//     (0,0)=TP  (0,1)=FN  (1,0)=FP  (1,1)=TN
	// (Removed the unused TN and FP locals.)
	int TP = confusion.at<int>(0,0);
	int FN = confusion.at<int>(0,1);
	return float(TP) / float(TP + FN);
}

float specificity (Mat& confusion)
{
	// Specificity (true-negative rate): TN / (TN + FP).
	// Assumed layout (rows = actual, cols = predicted):
	//     (0,0)=TP  (0,1)=FN  (1,0)=FP  (1,1)=TN
	// (Removed the unused TP and FN locals.)
	int TN = confusion.at<int>(1,1);
	int FP = confusion.at<int>(1,0);
	return float(TN) / float(TN + FP);
}

float youden (Mat& confusion)
{
	// Youden's J statistic: sensitivity + specificity - 1 (range [-1, 1]).
	const float sens = sensibility(confusion);
	const float spec = specificity(confusion);
	return sens + spec - 1.0f;
}

float ROC (Mat& confusion)
{
	// Single-point ROC "area": (1 + TPR - FPR) / 2 with FPR = 1 - specificity,
	// which simplifies to (sensitivity + specificity) / 2 and always lies in
	// [0, 1], so no absolute value is needed.
	// BUG FIX: the original called unqualified abs() on a float expression.
	// With only <stdlib.h> in scope that resolves to int abs(int), truncating
	// the operand, and abs(...)/2 then performed INTEGER division -- on such
	// toolchains the result collapsed to 0.
	float sens = sensibility(confusion);
	float spec = specificity(confusion);
	return (1.0f + sens - (1.0f - spec)) / 2.0f;
}

void test3svm()
{
	// SVM experiment using a fixed, previously discovered attribute subset
	// (1 = use the attribute, 0 = drop it): train_auto, then confusion-matrix
	// based evaluation, all inside the SupportVectorMachine wrapper.
	// (Removed an unused KNN instance and an srand() call -- leftovers copied
	// from test7knn(); no rand() use is visible in the active path here,
	// though SupportVectorMachine internals are not visible -- TODO confirm.)
	int bestAtt[39] = {1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0};
	Mat ba = Mat_<int>(1,39);
	for (int i = 0; i < 39; i++)
	{
		ba.at<int>(0,i) = bestAtt[i];
	}
	SupportVectorMachine svm = SupportVectorMachine(51,39);
	svm.read("Features\\Sub_features_N.txt", "Features\\Sub_labels.txt", ba);
	svm.train_auto();
	svm.constructConfusionMatrix();
	svm.evaluate();
	svm.printEvaluation();
	// Alternative path: search the attribute subset with the GA instead of
	// using the hard-coded mask above.
	//svm.initializeGeneticAlgorithm(50,39,50,5, 5);
	//Mat bestAttributes = svm.runGeneticAlgorithm();
	//svm.read("Features\\Sub_features_N.txt", "Features\\Sub_labels.txt", bestAttributes);
}

void test4ann()
{
	// ANN experiment: load the full feature set, run the genetic algorithm to
	// select an attribute subset, then reload restricted to that subset.
	// (Removed a stale commented-out block copied from the SVM test that
	// referenced an out-of-scope `svm` object.)
	ArtificialNeuralNetworks ann = ArtificialNeuralNetworks(51,39);
	ann.read("Features\\Sub_features_N.txt", "Features\\Sub_labels.txt");
	// GA settings: population 50, 39 genes (one per attribute), 50
	// generations; the meaning of the last two arguments is not visible from
	// here (presumably mutation/crossover related) -- TODO confirm.
	ann.initializeGeneticAlgorithm(50,39,50,5, 5);
	Mat bestAttributes = ann.runGeneticAlgorithm();
	ann.read("Features\\Sub_features_N.txt", "Features\\Sub_labels.txt", bestAttributes);
}

void test5bayes()
{
	// Naive-Bayes experiment: load the full feature set, run the genetic
	// algorithm to select an attribute subset, then reload with that subset.
	// (Removed a stale commented-out block copied from the SVM test that
	// referenced an out-of-scope `svm` object.)
	NaiveBayes bayes = NaiveBayes(51,39);
	bayes.read("Features\\Sub_features_N.txt", "Features\\Sub_labels.txt");
	// GA settings: population 50, 39 genes (one per attribute), 50
	// generations; the meaning of the last two arguments is not visible from
	// here -- TODO confirm.
	bayes.initializeGeneticAlgorithm(50,39,50,5, 5);
	Mat bestAttributes = bayes.runGeneticAlgorithm();
	bayes.read("Features\\Sub_features_N.txt", "Features\\Sub_labels.txt", bestAttributes);
}

void test6dtree()
{
	// Decision-tree experiment: load the full feature set, run the genetic
	// algorithm to select an attribute subset, then reload with that subset.
	// (Removed a stale commented-out block copied from the SVM test that
	// referenced an out-of-scope `svm` object.)
	DTree dtree = DTree(51,39);
	dtree.read("Features\\Sub_features_N.txt", "Features\\Sub_labels.txt");
	// GA settings: larger search than the other tests -- population 100, 39
	// genes, 100 generations; the meaning of the last two arguments is not
	// visible from here -- TODO confirm.
	dtree.initializeGeneticAlgorithm(100,39,100,5, 5);
	Mat bestAttributes = dtree.runGeneticAlgorithm();
	dtree.read("Features\\Sub_features_N.txt", "Features\\Sub_labels.txt", bestAttributes);
}

void test7knn()
{
	// KNN experiment: load the data restricted to a hand-picked attribute
	// mask (1 = keep the feature, 0 = drop it) and plot the loaded data.
	const int mask[39] = {1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0};
	KNN knn = KNN(51,39);
	Mat ba = Mat_<int>(1,39);
	// Seeds rand(); presumably consumed inside KNN (e.g. plotting or the GA
	// path below) -- no rand() call is visible here, TODO confirm.
	srand(time(NULL));
	for (int col = 0; col < 39; ++col)
	{
		ba.at<int>(0,col) = mask[col];
	}

	knn.read("Features\\Sub_features_N.txt", "Features\\Sub_labels.txt",ba);
	// Alternative path: search the attribute subset with the GA instead.
	//knn.initializeGeneticAlgorithm(100,39,100,5, 5);
	//Mat bestAttributes = knn.runGeneticAlgorithm();
	knn.plot(knn.getData());
}