#include "NaiveBayes.h"


// Default constructor: creates an untrained model with no data attached.
NaiveBayes::NaiveBayes()
{

}

// Sizing constructor: delegates to the base class (aliased as `super` in the
// header) to allocate storage for numRows samples of numAttributes features.
NaiveBayes::NaiveBayes(int numRows, int numAttributes) : super(numRows, numAttributes)
{

}

// NOTE(review): intentionally a no-op — presumably because the normal Bayes
// classifier exposes no hyper-parameters to auto-tune (unlike e.g. CvSVM).
// Confirm this is deliberate rather than an unfinished stub.
void NaiveBayes::train_auto()
{

}

void NaiveBayes::train()
{
	bayes.train(getData(), getLabels());
}

// Classifies a single feature row and returns the predicted label.
float NaiveBayes::predict(Mat& sample)
{
	const float label = bayes.predict(sample);
	return label;
}


// Builds the k-fold cross-validation confusion matrix for this model's
// data/labels and caches it in the `confusion` member.
// (Removed: a stale commented-out CvSVM copy of the same k-fold logic —
// it was superseded by createConfusionMatrix().)
void NaiveBayes::constructConfusionMatrix()
{
	confusion = createConfusionMatrix(getData(), getLabels(), k);
}







// Fitness function used by the genetic search: runs k-fold cross-validation
// on (d, l) and reduces the resulting confusion matrix to an accuracy score.
float NaiveBayes::getROCevaluation(Mat& d, Mat& l)
{
	return MLEvaluation::accuracy(createConfusionMatrix(d, l, k));
}

// Runs k-fold cross-validation with a normal (Gaussian) Bayes classifier and
// tallies a 2x2 confusion matrix over binary labels. Labels are assumed to
// be 0/1 (stored as float in `l`).
//   conf(0,0) = true positives    conf(0,1) = false positives
//   conf(1,0) = false negatives   conf(1,1) = true negatives
// NOTE(review): when d.rows is not divisible by k, the trailing d.rows % k
// samples never appear in any test fold — confirm this is acceptable.
Mat NaiveBayes::createConfusionMatrix(Mat& d, Mat& l, int k)
{
	Mat conf = Mat::zeros(2, 2, CV_32S);
	int testNum = d.rows / k;   // samples per test fold
	int foldStart = 0;          // first row of the current test fold
	int foldEnd = testNum;      // one past the last row of the current fold
	for (int i = 0; i < k; i++)
	{
		Mat_<float> test(testNum, d.cols);
		Mat_<float> train(d.rows - testNum, d.cols);
		Mat_<float> testLabels(testNum, 1);
		Mat_<float> trainLabels(d.rows - testNum, 1);
		int cntTrain = 0;
		int cntTest = 0;
		// Split rows into the current test fold and the training remainder.
		for (int n = 0; n < d.rows; n++)
		{
			// Truncating to int mirrors the original conversion; labels are
			// assumed to be whole numbers (0/1).
			const int label = static_cast<int>(l.at<float>(n, 0));
			if (n >= foldStart && n < foldEnd)
			{
				d.row(n).copyTo(test.row(cntTest));
				testLabels.at<float>(cntTest, 0) = label;
				cntTest++;
			}
			else
			{
				d.row(n).copyTo(train.row(cntTrain));
				trainLabels.at<float>(cntTrain, 0) = label;
				cntTrain++;
			}
		}
		foldStart += testNum;
		foldEnd += testNum;

		// Train a fresh classifier on this fold's training split.
		CvNormalBayesClassifier confBayes;
		confBayes.train(train, trainLabels);

		// Predict every sample of the held-out fold.
		Mat_<float> predicted(testLabels.rows, 1);
		for (int n = 0; n < test.rows; n++) {
			Mat sample = test.row(n);
			predicted.at<float>(n, 0) = confBayes.predict(sample);
		}

		// Tally predictions against ground truth (epsilon-safe float compare).
		for (int j = 0; j < testLabels.rows; j++) {
			float p = predicted.at<float>(j, 0);
			float a = testLabels.at<float>(j, 0);
			const bool correct = fabs(p - a) < FLT_EPSILON;
			if (correct && a == 1)
				conf.at<int>(0, 0)++;   // true positive
			else if (!correct && a == 0)
				conf.at<int>(0, 1)++;   // false positive
			else if (!correct && a == 1)
				conf.at<int>(1, 0)++;   // false negative
			else if (correct && a == 0)
				conf.at<int>(1, 1)++;   // true negative
		}
	}
	return conf;
}



// Genetic search over candidate attribute subsets: each generation scores
// every candidate by cross-validated accuracy, ranks the population
// best-first, and breeds the next generation via crossover. Returns the
// fittest element found after all iterations.
Mat NaiveBayes::runGeneticAlgorithm()
{
	// Seed the RNG used by the crossover/mutation helpers.
	srand(time(NULL));
	vector<KeySort> population = initializeElements();

	for (int gen = 0; gen < numberOfIterations; gen++)
	{
		vector<KeySort> scored = initializeElements();
		for (int idx = 0; idx < numberOfElements; idx++)
		{
			Mat candData = createDataMat(population, idx);
			Mat candLabels = createLabelsMat(population, idx);

			// Fitness = k-fold accuracy of this candidate subset.
			scored[idx].key = getROCevaluation(candData, candLabels);
			scored[idx].element = population[idx].element;
		}
		// Rank best-first: ascending sort, then flip.
		std::sort(scored.begin(), scored.end());
		std::reverse(scored.begin(), scored.end());

		population = crossOverAll(scored);
		cout << ((float)gen / (float)numberOfIterations) * 100 << " %" << endl;
		cout << "BEST accuracy " << population[0].key << endl;
	}
	std::sort(population.begin(), population.end());
	std::reverse(population.begin(), population.end());
	cout << population[0].element << endl;
	cout << population[0].key << endl;
	return population[0].element;
}
