#include "feature.h"
#include "dataset.h"
#include "stdio.h"
#include <iostream>
#include <fstream>

using namespace cv;
using namespace std;


void trainRForest(std::string trainPath, std::string out_dir)
{
	// Trains a random-forest classifier on leaf samples and saves it as XML.
	// Features: 72-dim radial-angle histogram per sample (calcRadAngleHist).
	int nFeatures = 72/*21 * 25 * 2*/;
	cv::Mat train_set;
	cv::Mat responses;
	parseFileAndExtractFeatures(Path::combine(trainPath, "train_tree_leaves.csv").c_str(), train_set, responses, nFeatures, calcRadAngleHist/*calcHoCS*/);

	// Variable types: all nFeatures inputs are numerical; the response
	// (last, +1 element) is categorical -> this is a classification problem.
	Mat var_type = Mat(nFeatures + 1, 1, CV_8U );
	var_type.setTo(Scalar(CV_VAR_NUMERICAL)); // all inputs are numerical
	var_type.ptr<uchar>(0)[nFeatures] = CV_VAR_CATEGORICAL;

	CvRTParams params = CvRTParams(20, // max depth
		2,     // min sample count
		0,     // regression accuracy: N/A for classification
		false, // compute surrogate split, no missing data
		10,    // max number of categories (use sub-optimal algorithm for larger numbers)
		0,     // priors: null => all classes weighted equally.
		       // (The previous code passed an all-ones float array of length
		       //  nFeatures here, but CvRTParams expects one prior PER CLASS,
		       //  not per feature; for uniform weights a null pointer is the
		       //  documented equivalent and avoids the mis-sized buffer.)
		false, // do not calculate variable importance
		4,     // number of variables randomly selected per node to find the best split
		500,   // max number of trees in the forest
		0.01f, // forest accuracy (termination threshold)
		CV_TERMCRIT_ITER |	CV_TERMCRIT_EPS // termination criteria
		);

	// train random forest classifier (using training data)
	cv::Ptr<CvRTrees> rtree = new CvRTrees;

	rtree->train(train_set, CV_ROW_SAMPLE, responses,
		Mat(), Mat(), var_type, Mat(), params);

	// NOTE(review): the file name says "1000_trees" but params use 500 trees;
	// name kept as-is because other tools may load this exact path.
	rtree->save(Path::combine(out_dir, "random_forest_1000_trees_d20_hra.xml").c_str());
}

void trainNaiveBayes(std::string trainPath, std::string out_dir)
{
	// Trains a normal ("naive") Bayes classifier on the 72-dimensional
	// radial-angle histogram features and writes the model to out_dir.
	int featureCount = 72;
	cv::Mat samples;
	cv::Mat labels;
	parseFileAndExtractFeatures(Path::combine(trainPath, "train_tree_leaves.csv").c_str(),
		samples, labels, featureCount, calcRadAngleHist);

	// The constructor overload trains immediately from (samples, labels).
	CvNormalBayesClassifier classifier(samples, labels);
	classifier.save(Path::combine(out_dir, "naive_bayes.xml").c_str());
}

void trainSVM(std::string trainPath, std::string out_dir)
{
	int nFeatures = 1050;
	cv::Mat train_set;
	cv::Mat responses;    

	int HOCS_A = 2, HOCS_B = 3, HOCS_NB = 21, HOCS_NSC = 25;

	//filename = cv::format(filename.c_str(), HOCS_A, HOCS_B, HOCS_NB, HOCS_NSC);
	//printf("Will save SVM to %s",Path::combine(out_dir, filename).c_str());
    
	parseFileAndExtractFeatures(Path::combine(trainPath, "train_tree_leaves.csv").c_str(), train_set, responses, nFeatures, /*calcRadAngleHist*/calcHoCS);	

	//ofstream ofs("G:\\machine-learning-leaves\\scale.txt");
	//for(int i = 0; i < nFeatures; i++)
	//{

	//	Scalar mean, stddev;
	//	cv::meanStdDev(train_set.col(i), mean, stddev);
	//	train_set.col(i) -= mean[0];
	//	train_set.col(i) /= stddev[0];

	//	ofs<< mean[0] << " " << stddev[0] << endl;
	//}
	//ofs.close();

	{
		printf("Started training for linear svc\n");
		cv::String filename = "svm_hoc_svc_linear.xml";
		//SVM
		cv::SVMParams params;
		params.svm_type    = CvSVM::C_SVC;
		//CvSVM::NU_SVC
		//params.nu = 0.001;
		//params.C = 0.1;
		params.kernel_type = CvSVM::LINEAR;
		//params.kernel_type = CvSVM::RBF;
		params.term_crit   = cvTermCriteria(CV_TERMCRIT_ITER, 100, 1e-6);

		cv::SVM svm;
		svm.train_auto(train_set, responses, Mat(), Mat(), params);
		//svm.train(train_set, responses, Mat(), Mat(), params);
		svm.save(Path::combine(out_dir, filename).c_str());
	}

	{
		printf("Started training for rbf svc\n");
		cv::String filename = "svm_hoc_svc_rbf.xml";
		//SVM
		cv::SVMParams params;
		params.svm_type    = CvSVM::C_SVC;
		//CvSVM::NU_SVC
		//params.nu = 0.001;
		//params.C = 0.1;
		//params.kernel_type = CvSVM::LINEAR;
		params.kernel_type = CvSVM::RBF;
		params.term_crit   = cvTermCriteria(CV_TERMCRIT_ITER, 100, 1e-6);

		cv::SVM svm;
		svm.train_auto(train_set, responses, Mat(), Mat(), params);
		//svm.train(train_set, responses, Mat(), Mat(), params);
		svm.save(Path::combine(out_dir, filename).c_str());
	}
	{
		printf("Started training for linear nu\n");
		cv::String filename = "svm_hoc_nu_linear.xml";
		//SVM
		cv::SVMParams params;
		//params.svm_type    = CvSVM::C_SVC;
		params.svm_type    = CvSVM::NU_SVC;
		params.nu = 0.001;
		//params.C = 0.1;
		params.kernel_type = CvSVM::LINEAR;
		//params.kernel_type = CvSVM::RBF;
		params.term_crit   = cvTermCriteria(CV_TERMCRIT_ITER, 100, 1e-6);

		cv::SVM svm;
		svm.train_auto(train_set, responses, Mat(), Mat(), params);
		//svm.train(train_set, responses, Mat(), Mat(), params);
		svm.save(Path::combine(out_dir, filename).c_str());
	}
	{
		printf("Started training for rbf nu\n");
		cv::String filename = "svm_hoc_nu_rbf.xml";
		//SVM
		cv::SVMParams params;
		//params.svm_type    = CvSVM::C_SVC;
		params.svm_type    = CvSVM::NU_SVC;
		params.nu = 0.001;
		//params.C = 0.1;
		//params.kernel_type = CvSVM::LINEAR;
		params.kernel_type = CvSVM::RBF;
		params.term_crit   = cvTermCriteria(CV_TERMCRIT_ITER, 100, 1e-6);

		cv::SVM svm;
		svm.train_auto(train_set, responses, Mat(), Mat(), params);
		//svm.train(train_set, responses, Mat(), Mat(), params);
		svm.save(Path::combine(out_dir, filename).c_str());
	}
}

void trainKNN(std::string trainPath, std::string out_dir)
{
	// Trains a k-nearest-neighbours model on the 72-dim radial-angle
	// histogram features. As in the original ("no way"), the model is never
	// written to disk, so out_dir is unused here — presumably because
	// CvKNearest offers no usable save/load in this OpenCV version; confirm.
	int featureCount = 72;
	cv::Mat samples;
	cv::Mat labels;
	parseFileAndExtractFeatures(Path::combine(trainPath, "train_tree_leaves.csv").c_str(),
		samples, labels, featureCount, calcRadAngleHist);

	// Train: not a regression problem, maximum k = 50.
	cv::KNearest classifier;
	classifier.train(samples, labels, Mat(), false, 50);
}

// Command-line schema for cv::CommandLineParser: {short | long | default | help}.
// Bug fixed: a stray '}' after "2 - SVM" closed the `method` entry early,
// leaving the " 3 - KNN}" fragment dangling outside the option description,
// so the KNN option never appeared correctly in the printed help.
const char * params =
	"{h | help |      | print this message   }"
	"{d | dataset | G:\\machine-learning-leaves  | path to folder with train.csv }"
	"{o | out_dir | G:\\machine-learning-leaves  | path to result folder}"
	"{n | name   | G:\\machine-learning-leaves\\name.xml  | classifier name}"
	"{m | method  | 2  | Type of classifier: "
					   " 0 - Random forest"
					   " 1 - Naive Bayes"
					   " 2 - SVM"
					   " 3 - KNN}";

int main(int argc, const char* argv[])
{
	// Entry point: parse the command line and dispatch to the selected trainer.
	cv::CommandLineParser parser(argc, argv, params);
	std::string dataset = parser.get<std::string>("dataset");
	std::string outdir = parser.get<std::string>("out_dir");

	if(parser.get<bool>("help") || dataset.empty())
	{
		parser.printParams();
		return 1;
	}

	int method = parser.get<int>("method");

	switch (method)
	{
	case 0:
		trainRForest(dataset, outdir);
		break;
	case 1:
		trainNaiveBayes(dataset, outdir);
		break;
	case 2:
		trainSVM(dataset, outdir);
		break;
	case 3:
		trainKNN(dataset, outdir);
		break;
	default:
		// Previously an unknown method fell through silently and the program
		// exited with status 0 having done nothing.
		printf("Unknown method %d (expected 0-3)\n", method);
		parser.printParams();
		return 1;
	}

	return 0;
}