/*!  \file  UnitTester.cpp
 *	  \brief   Implementation of the UnitTester singleton and the
 *             Test, UnitTest and Benchmark support classes.
 *   \details   Provides test registration/deregistration, persistent
 *              pass/compile timestamps, and benchmark result reporting.
 *   \author    Yordan Madzhunkov German Research School for Simulation Sciences GmbH
 *   \version   1.0
 *   \date      01/17/2013 10:58:56 AM
 *   \copyright GNU Public License.
 */
#include "UnitTester.h"

#include <assert.h>
#include <limits.h>
#include <math.h>
#include <stdio.h>   // remove()
#include <stdlib.h>
#include <string.h>
#include <time.h>    // time_t, localtime, strftime (used by runTest)

#include <algorithm>
#include <fstream>
#include <iostream>

#include "ImplementationFactory.h"
#include "Map.h"
#include "Timer.h"


using namespace std;
// Singleton instance pointer; lazily created in instance() and reset to 0
// in the destructor and in deregisterTest/deregisterBench teardown.
UnitTester * UnitTester::myInstance = 0;
/// Queries the working copy's SVN revision by piping `svn info` through
/// grep/awk into a temporary file and parsing it back.
/// Caches the result in myRev and returns it; returns -1 when the svn
/// client is unavailable or the output could not be parsed.
int UnitTester::getSVNRevisionFromSVN() {
	int rev = -1;
	int ret = system("svn info | grep Revision | awk '{print $2}' > revision.txt");
	ifstream revFile("revision.txt");
	if (revFile.is_open()) {
		revFile >> rev;
		revFile.close();
		// Remove the temporary file via the C library instead of
		// spawning a second shell just to run `rm`.
		ret |= remove("revision.txt");
		if (ret != 0)
			cerr << "Warning: could not determine SVN revision (see UnitTester.cpp)" << endl;
	}
	myRev = rev;
	return rev;
}
/// Name of the persistence file holding pass/compile timestamps.
/// Debug and optimized builds use separate files so their timestamps
/// never clash with each other.
const char * UnitTester::getDataFileName() const {
#ifdef NDEBUG
	return "UnitTester_opt.dat";
#else
	return "UnitTester_deb.dat";
#endif
}
/// Returns the process-wide UnitTester, creating it on first access.
UnitTester * UnitTester::instance() {
	if (myInstance == 0) {
		myInstance = new UnitTester();
	}
	return myInstance;
}
/// Returns the singleton, constructing it with the command line on first
/// access. If the instance already exists, argc/argv are ignored.
UnitTester * UnitTester::instance( int argc, char *argv[]) {
	if (myInstance == 0) {
		myInstance = new UnitTester(argc, argv);
	}
	return myInstance;
}

// Constructs the tester: allocates the fixed-capacity test/benchmark
// tables, captures the command line, and reads the SVN revision.
UnitTester::UnitTester(int argc_, char *argv_[]) {
	tests  = new UnitTest*[MaxNumberOfTests];
	benchs = new Benchmark*[MaxNumberOfBenchmarks];
	nTests = 0; nBenchmarks = 0;
	argc = argc_; argv = argv_;
	verbose = 1;
	timer = Timer::instance(); 
	myRev = getSVNRevisionFromSVN();
	// NOTE(review): operator new throws on failure, so this null check is
	// effectively dead (it would only fire with a nothrow allocator).
	if (!tests) cerr << " Out of memory" << endl;
	if (verbose > 0) {
		cout << "SVN Revision " << myRev << endl;;
	}
	allTests      = false;
	allBenchmarks = false;
	
}
/// Releases the registration tables and clears the singleton pointer.
/// The tables hold non-owning pointers; the tests/benchmarks themselves
/// are globals and are destroyed by the runtime.
UnitTester::~UnitTester() {
	// delete[] on a null pointer is a no-op, so no guards are needed.
	delete [] tests;
	tests = 0;
	delete [] benchs;
	benchs = 0;
	myInstance = 0;
	cout << "Deleting Unit Tester" << endl;
}

/// Adds a test to the fixed-capacity table (called from UnitTest's ctor).
void UnitTester::registerTest  (UnitTest & test){
	if (nTests >= MaxNumberOfTests) {
		cerr << "ERROR [UnitTester::registerTest] Maximum number of test reached" << endl;
		return;
	}
	tests[nTests] = &test;
	nTests++;
}
/// Removes a test from the table (called from UnitTest's dtor).
/// Uses swap-remove: the last registered test fills the vacated slot,
/// so order is not preserved. When the last test AND last benchmark are
/// gone, the singleton tears itself down.
void UnitTester::deregisterTest(UnitTest & test) {
	bool removed = false;
	for (unsigned int slot = nTests; slot > 0 && !removed; slot--) {
		if (tests[slot - 1] != &test) continue;
		nTests--;
		tests[slot - 1] = tests[nTests];
		tests[nTests] = 0;
		removed = true;
	}
	if (!removed)
		cerr << "ERROR [UnitTester::deregisterTest] Test not found" << endl;
	if (nTests == 0 && nBenchmarks == 0) {
		delete myInstance;
	}
}
/// Adds a benchmark to the fixed-capacity table (called from Benchmark's ctor).
void UnitTester::registerBench  (Benchmark & b){
	if (nBenchmarks >= MaxNumberOfBenchmarks) {
		cerr << "ERROR [UnitTester::registerBench] Maximum number of benchmarks reached" << endl;
		return;
	}
	benchs[nBenchmarks] = &b;
	nBenchmarks++;
}
/// Removes a benchmark from the table (called from Benchmark's dtor).
/// Swap-remove, mirroring deregisterTest; deletes the singleton once both
/// tables are empty.
void UnitTester::deregisterBench(Benchmark & b) {
	bool removed = false;
	for (unsigned int slot = nBenchmarks; slot > 0 && !removed; slot--) {
		if (benchs[slot - 1] != &b) continue;
		nBenchmarks--;
		benchs[slot - 1] = benchs[nBenchmarks];
		benchs[nBenchmarks] = 0;
		removed = true;
	}
	if (!removed)
		cerr << "ERROR [UnitTester::deregisterBench] Benchmark not found" << endl;
	if (nTests == 0 && nBenchmarks == 0) {
		delete myInstance;
	}
}

/// Runs test i, prints a colored [ Passed ]/[ FAILED ] tag padded to a
/// fixed column, reports the test's compile timestamp, and records the
/// pass time so unchanged tests can be skipped on subsequent runs.
/// Out-of-range or null slots are silently ignored.
void  UnitTester::runTest(const unsigned int i) {
	if ( i < nTests && tests[i] ) {
		if (verbose > 0) {
			cout << "Running test: " << tests[i]->getName();
			cout.flush();
		}
		bool passed = tests[i]->test();
		int l = 100;          // > shift, so padding is skipped when the name is null
		const int shift = 60; // column where the status tag starts
		if ( tests[i]->getName() ) l = strlen(tests[i]->getName());
		if (passed) {
			if (verbose > 0) {
				// Pad loop variable renamed: it previously shadowed parameter `i`.
				for (int p = 0; p + l < shift ; p++)	cout << " ";
				cout << " " << "\033[0;32m" << "[ Passed ]" << "\033[0m";
			}
		} else {
			if (verbose == 0) cout << "Running test: " << tests[i]->getName();
			for (int p = 0; p + l < shift ; p++)	cout << " ";
			cout << " " << "\033[1;31m" << "[ FAILED ]" << "\033[0m" ;
		}
		time_t t = tests[i]->getCompileTime();
		struct tm * ts = localtime(&t);
		char buf[128];
		// localtime may return NULL for an unrepresentable time; passing a
		// NULL tm to strftime is undefined behavior, so guard it.
		if (ts) strftime (buf, 128, "%F %T", ts );
		else    strcpy(buf, "unknown");
		cout << "( Last modified at " << buf << ")" << endl;
		time_t now = timer->now();
		if (passed) {
			tests[i]->setPassedTime(now);
		}
	}
}
/// Restores previous pass/compile timestamps, runs every registered test,
/// then persists the updated state.
void UnitTester::runTests() {
	if (!readDataFile())
		cout << "problem reading data file" << endl;
	unsigned int idx = 0;
	while (idx < nTests) {
		runTest(idx);
		++idx;
	}
	writeDataFile();
}
/// Runs only the tests whose pass-state was invalidated by recompilation.
/// (The misspelled name -- "Recomiled" -- is kept as-is: it is part of the
/// public interface and callers depend on it.)
void UnitTester::runRecomiledTests() {
	if (!readDataFile())
		cout << "problem reading data file" << endl;
	for (unsigned int idx = 0; idx < nTests; ++idx) {
		if (tests[idx]->isPassed()) continue; // still valid, skip
		runTest(idx);
	}
	writeDataFile();
}
void UnitTester::runBenchmark(const unsigned int i) {
	if (i < nBenchmarks && benchs[i]) {
		if (!benchs[i]->allDatFilesArePresent() || !benchs[i]->isPassed()) {
			cout << "Running " << benchs[i]->getName() << " ( " ;
			if (!benchs[i]->allDatFilesArePresent())
				cout << " dat files are missing";
			if (!benchs[i]->isPassed()) 
				cout << " recompiled";
			cout << " )" << endl;
			benchs[i]->run();
		} else {
			cout << "Skipping " << benchs[i]->getName() << endl;
		}
	}
}
/// Restores stored state, then runs every registered benchmark and
/// persists the updated timestamps.
void UnitTester::runBenchmarks() {
	if (!readDataFile())
		cout << "problem reading data file" << endl;
	if (verbose > 0)
		cout << "Running all benchmarks" << endl;
	unsigned int idx = 0;
	while (idx < nBenchmarks) {
		runBenchmark(idx);
		++idx;
	}
	writeDataFile();
}
/// Writes one record for `test`: a name line, a pass/fail tag with the
/// pass timestamp, and a compile-time line. Always reports success.
bool UnitTester::writeToDataFile( Test & test, ofstream & fout) const{
	fout << test.getName() << '\n';
	char stamp[128];
	time_t when = test.whenPassed();
	test.printDateAndTime(stamp, when);
	const char * state = test.isPassed() ? getPassedStr() : getFailedStr();
	fout << state << stamp;
	when = test.getCompileTime();
	test.printDateAndTime(stamp, when);
	fout << getCompiledStr() << stamp;
	return true;
}
/// Persists both tables -- tests first, then benchmarks -- to the
/// build-specific data file (see getDataFileName()).
void UnitTester::writeDataFile() const {
	ofstream fout(getDataFileName());
	writeToDataFile(tests, nTests, fout);
	writeToDataFile(benchs, nBenchmarks, fout);
}
// Reads one two-line record for `test` (pass-state line, compile-time line)
// and, if the stored compile time still matches the binary's compile time
// and the test passed after that compilation, restores the pass timestamp.
// Returns false as soon as the stream goes bad.
bool UnitTester::readFromDataFile( Test & test, std::ifstream & fin) const {
	const int S = 128;
	char buf[S];
	fin.getline(buf, S); bool res = fin.good(); if (!res) return false;
	// Record layout: "<PassedStr|FailedStr><timestamp>" then
	// "<CompiledStr><timestamp>"; the tag lengths are the parse offsets.
	size_t l  = strlen(getPassedStr());
	size_t l2 = strlen(getCompiledStr());
	bool passed = strncmp ( buf, getPassedStr(), l ) == 0;
	time_t filePassedTime  = test.getDateAndTime(buf + l);
	fin.getline(buf, S); res &= fin.good(); if (!res) return false;
	time_t fileCompileTime = test.getDateAndTime(buf + l2);
	// Only restore the pass state when the stored record refers to the
	// currently compiled version of the test.
	if ( res && passed && fileCompileTime == test.getCompileTime() &&
		filePassedTime >= fileCompileTime) {
		test.setPassedTime(filePassedTime); 
	}
	return res;
}
// Reads a whole table (tests or benchmarks) back from the data file.
// The file stores a count followed by one record per entry; entries are
// matched to the in-memory table by name, first positionally, then by a
// linear search, so renamed/reordered registrations are tolerated.
// Records whose test no longer exists are skipped with a warning.
template <class T>
bool UnitTester::readFromDataFile(T ** t, const unsigned int nT, std::ifstream & fin) const {
	const int S = 128;	const unsigned int nF = UINT_MAX; // nF = "not found" sentinel
	char buf[S];	bool res = true;

	unsigned int nTestsInFile = 0;
	fin.getline(buf, S); res &= fin.good(); if (!res) return false;
	nTestsInFile = atoi(buf);

	for (unsigned int i = 0; i < nTestsInFile; i++) {
		fin.getline(buf, S);
		// Fast path: record i usually corresponds to table slot i.
		unsigned int found = i < nT && strcmp(buf, t[i]->getName())==0 ? i : nF;
		if (found == nF) {
			// Slow path: scan the whole table for a matching name.
			for (unsigned int j = 0; j < nT && found == nF; j++)
				found = strcmp(buf, t[j]->getName()) == 0 ? j : nF;
		} 
		if (found == nF) { // test was removed from our application 
			cout << "warning " << buf << " was not found" << endl;
			// Consume the record's two data lines so the stream stays aligned.
			fin.getline(buf, S); res &= fin.good(); 
			fin.getline(buf, S); res &= fin.good();
		} else {
			res &= readFromDataFile(*t[found], fin); 
		}
		if (!res) return false;
	}
	return res;
}
/// Writes a whole table (tests or benchmarks): the element count followed
/// by one record per non-null entry. Returns false if any slot was null.
template <class T>
bool UnitTester::writeToDataFile (T ** t, const unsigned int nT, std::ofstream & fout) const {
	fout << nT << endl;
	bool allWritten = true;
	for (unsigned int k = 0; k < nT; k++) {
		if (t[k] == 0) {
			allWritten = false;
			continue;
		}
		writeToDataFile(*t[k], fout);
	}
	return allWritten;
}
/// Loads the persisted state for both tables; bails out at the first
/// failure (missing file, truncated record, ...).
bool  UnitTester::readDataFile() const{
	ifstream fin(getDataFileName());
	if (!fin.is_open()) return false;
	if (!readFromDataFile( tests,      nTests, fin)) return false;
	if (!readFromDataFile(benchs, nBenchmarks, fin)) return false;
	return true;
}

// TEST STUFF
// Base-class constructor: all string fields start unset (owned, heap-
// allocated later via set()) and the test starts in the "not passed" state.
Test::Test() {
	name 			= 0;
	description	= 0;
	author 		= 0;
	developer 	= 0;
	passedTime  = 0;	
	passed      = false;
	setCompileTime(__DATE__, __TIME__); // each class that inherits this one should also have this line.
}
/// Releases the owned string fields. delete[] tolerates null, so no
/// guards are needed.
Test::~Test() {
	delete [] name;        name        = 0;
	delete [] description; description = 0;
	delete [] author;      author      = 0;
	delete [] developer;   developer   = 0;
}
/// Replaces `setting` with a heap-allocated copy of `val`.
/// The previous value (if any) is released. Passing val == NULL now
/// clears the field instead of crashing in strlen().
void Test::set(char * & setting, const char * val) {
	delete [] setting;
	setting = 0;
	if (!val) return; // robustness: treat NULL as "unset"
	const size_t l = strlen(val);
	setting = new char[l + 1];
	memcpy(setting, val, l + 1); // +1 copies the terminator too
}
/// Returns true when `filename` can be opened for reading.
/// The original `return ifile;` relied on the pre-C++11 implicit
/// stream-to-bool conversion; since C++11 that conversion operator is
/// explicit and the statement is ill-formed, so spell the check out.
bool Test::fexists(const char * filename) {
	ifstream ifile(filename);
	return ifile.is_open();
}
// Unit Test stuff
// A UnitTest registers itself with the singleton on construction and
// removes itself on destruction; global instances therefore self-register
// before main() runs.
UnitTest::UnitTest():Test() {	
	UnitTester::instance()->registerTest(*this);
}
UnitTest::~UnitTest() {	UnitTester::instance()->deregisterTest(*this);}

// Benchmark stuff
// Registers a float metric. Float IDs live in the name map at offset
// nMaxIntMetrics so they never collide with int-metric IDs.
// NOTE(review): there is no bounds check against the floatMetrics
// capacity -- the constructor never stores maxFloatMetrics, so one cannot
// be added here without widening the class; registering more float
// metrics than the constructor reserved would overrun the array.
template <> void Benchmark::addMetric(Metric<double> & metric, const char * name) {
	names->insert(name, nMaxIntMetrics + nFloatMetrics); 
	floatMetrics[nFloatMetrics++] = &metric; 
}
/// Looks up a float metric by name; sets `metric` to NULL when the name
/// is unknown or belongs to an int metric. Float metric IDs start at
/// nMaxIntMetrics (see addMetric<double>).
template <> void Benchmark::getMetric(Metric<double>* & metric, const char * name) {
	metric = NULL;
	int ID = names->getID(name);
	if (ID == Map::MaxID) return;      // name not registered
	if (ID < nMaxIntMetrics) return;   // ID belongs to an int metric
	metric = floatMetrics[ID - nMaxIntMetrics];
}
/// Registers an int metric. Int IDs occupy [0, nMaxIntMetrics) in the
/// name map. Registering beyond the capacity reserved in the constructor
/// previously overran intMetrics; now it is rejected with an error.
template <> void Benchmark::addMetric(Metric<int   > & metric, const char * name) {
	if (nIntMetrics >= nMaxIntMetrics) {
		cerr << "ERROR [Benchmark::addMetric] Maximum number of int metrics reached" << endl;
		return;
	}
	names->insert(name, nIntMetrics); 
	intMetrics[nIntMetrics++] = &metric; 
}
/// Looks up an int metric by name; sets `metric` to NULL when the name is
/// unknown or belongs to a float metric.
template <> void Benchmark::getMetric(Metric<int>* & metric, const char * name) {
	int ID = names->getID(name);
	// BUG FIX: IDs at or above nMaxIntMetrics belong to float metrics
	// (see addMetric<double>); the old check `ID >= 0` let those index
	// past the end of intMetrics. Require ID < nMaxIntMetrics as well,
	// mirroring the bound in the Metric<double> specialization.
	metric = (ID != Map::MaxID && ID >= 0 && ID < nMaxIntMetrics) ? intMetrics[ID] : NULL;
}
// Reverse lookup: metric ID -> registered name (forwarded to the Map).
const char * Benchmark::getMetricName(int index) const {
	return names->getName(index);	
}
// Builds a benchmark around an Implementation and reserves fixed-size
// metric tables. Registers itself with the UnitTester singleton.
// NOTE(review): maxFloatMetrics is never stored in a member, which is why
// addMetric<double> cannot bounds-check -- confirm before relying on it.
Benchmark::Benchmark(Implementation & impl_, const int maxIntMetrics, const int maxFloatMetrics ):impl(impl_) {
	UnitTester::instance()->registerBench(*this);
	nFloatMetrics = 0;
	nIntMetrics = 0;
	nMaxIntMetrics = maxIntMetrics;
	// The Map's ID space must be able to hold both metric ranges
	// (ints in [0, maxIntMetrics), floats starting at maxIntMetrics).
	assert(maxIntMetrics + maxFloatMetrics < Map::MaxID); 
	names        = new Map() ;
	intMetrics   = new Metric<int    >* [  maxIntMetrics];
	floatMetrics = new Metric<double >* [maxFloatMetrics];
	timer        = Timer::instance();
	for (int i = 0; i < nTimers  ; i++)    timeMetrics[i] = 0;
	memory = NULL; success = NULL; baseName = NULL; next = NULL;
	setCompileTime(__DATE__, __TIME__); // each class that inherits this one should also have this line.
	// Second call folds in the implementation's compile time -- presumably
	// so a recompiled implementation invalidates the benchmark; confirm in
	// setCompileTime(time_t).
	setCompileTime(impl.getCompileTime());
	xlabel = "Problem size";

}
/// Lazily registers the dynamic-memory metric (bytes per run).
/// Idempotent: only the first call creates it.
void Benchmark::addMemoryMetrics() {
	if (memory != NULL) return;
	memory = new Metric<int>(nRuns, "Bytes");
	if (memory) addMetric(*memory, "Dynamic Memory");
}
/// Lazily registers the per-run success flag metric.
/// Idempotent: only the first call creates it.
void Benchmark::addSuccessMetrics() {
	if (success != NULL) return;
	success = new Metric<int>(nRuns);
	if (success) addMetric(*success, "Computation succeeded");
}

/// Unregisters from the UnitTester, then releases every metric registered
/// via addMetric (including the lazily-created memory/success/timer
/// metrics, which live in these tables) plus the owned tables themselves.
Benchmark::~Benchmark(){
	UnitTester::instance()->deregisterBench(*this);
	for (int m = 0; m < nIntMetrics; m++) {
		delete intMetrics[m];
		intMetrics[m] = 0;
	}
	for (int m = 0; m < nFloatMetrics; m++) {
		delete floatMetrics[m];
		floatMetrics[m] = 0;
	}
	// delete / delete[] tolerate null pointers, so no guards are needed.
	delete next;            next         = 0;
	delete names;           names        = 0;
	delete [] intMetrics;   intMetrics   = 0;
	delete [] floatMetrics; floatMetrics = 0;
	delete [] baseName;     baseName     = 0;
}
// Builds "<directory><benchName>_<metricName><extension>" on the heap,
// replacing characters that are unsafe in file names (per
// UnitTester::isGoodInFileName) in the name/metric/extension parts.
// The directory part is copied verbatim. Caller owns (delete[]) the result.
char * Benchmark::getMetricFileName(const char * metricName, 
	const char * directory, const char * extension, const char * benchName ) const {
	const char * myName = benchName == NULL ? getName() : benchName;
	assert(directory  != NULL);
	assert(myName     != NULL);
	assert(metricName != NULL);
	int l1 = strlen(directory);
	int l2 = strlen(myName);
	int l3 = strlen(metricName);
	int l4 = extension != NULL ? strlen(extension) : 0;
	// Layout: dir(l1) + name(l2) + '_' + metric(l3) + ext(l4) + '\0'
	//       = l1 + l2 + l3 + l4 + 2 bytes.
	int total = l1 + l2 + l3 + l4 + 2;
	char * res = new char[total];
	if ( res ) {
		memcpy(res, directory, l1); 
		// Benchmark name, sanitized.
		for (int i = l1; i < l1 + l2; i++) {
			char a = myName[i-l1];
			res[i] = UnitTester::isGoodInFileName(a) ? a : '_';
		}
		res[l1+l2] = '_';
		// Metric name, sanitized.
		for (int i = l1 + l2 + 1; i < l1 + l2 + l3 + 1; i++) {
			char a = metricName[i - l1 - l2 - 1];
			res[i] = UnitTester::isGoodInFileName(a) ? a : '_';
		}
		// Extension, sanitized (loop body never runs when extension is NULL,
		// since then l4 == 0).
		for (int i = l1 + l2 + l3 + 1; i < l1 + l2 + l3 + l4 + 1; i++) {
			char a = extension[i - l1 - l2 - 1 - l3 ];
			res[i] = UnitTester::isGoodInFileName(a) ? a : '_';
		}

		res[total-1] = '\0';
	}
	return res;
}

// Writes one metric's summary statistics to "<benchDir>/<bench>_<metric>.dat"
// (one row per configuration: problem size + 7 statistics) and emits a
// matching gnuplot script that renders a candlestick (box-and-whiskers)
// plot to a .png. `res` holds 7 values per configuration, laid out by
// Metric::storeMetrics in Benchmark::run.
template <typename T>
void Benchmark::reportResults(const Metric<T> & metric, const char * MetricName,	const double * res) {
	assert(res != NULL); assert(MetricName != NULL);
	const char * filename = getMetricFileName(
		MetricName, // metric Name
		UnitTester::instance()->getBenchmarkDirectory(), ".dat" 	) ;
	ofstream fout(filename);
	fout << "# Results on running benchmark " << getName() << '\n';
	fout << "# Data for " << MetricName << '\n';
	fout << "# N Avg Sigma Min 1stQuartile Median 3stQuartile Max" << '\n'; 
	for (int k = 0; k < nConfigurations; k++) {
		fout << getProblemSize(k) << " "; 
		// NOTE(review): this cout looks like leftover debug output -- it
		// prints every problem size to stdout while reporting.
		cout << getProblemSize(k) << endl;
		for (int j = 0; j < 7; j++) 
			fout << res[7 * k + j] << " ";
		fout << '\n';
	}
	fout.close();
	// Keep the .dat path (with directory) while we build the script paths.
	const char * datFileName = filename; filename = 0;
	filename = getMetricFileName(	MetricName, // metric Name
		UnitTester::instance()->getBenchmarkDirectory(), ".gnuplot" 	) ;
	const char * plotOut = getMetricFileName( MetricName, "./" , ".png" 	) ;
	if (datFileName) delete [] datFileName; datFileName = 0;
	// The script references the .dat file relative to "./" so gnuplot can
	// be run from the benchmark directory.
	datFileName = getMetricFileName(	MetricName, "./", ".dat" ) ;

	fout.open(filename);

	fout << "set terminal png\n";
	fout << "set output '" << plotOut << "'\n";
	fout << "set boxwidth 0.2 absolute\n";
	fout << "set title \"" << getName() << " " << MetricName << "\" \n"; 
	fout << "set autoscale\n";
//	fout << "set xrange[0:" << nConfigurations + 1<< "]\n";
//	fout << "set yrange[0:3e-6]\n";
//	fout << "set xlabel \"Configuration Index\"\n";
	fout << "set xlabel \"" << xlabel << "\"\n";
	fout << "set ylabel \"" << MetricName;
	if (metric.hasUnits()) 	fout << "(" << metric.getUnits() << ")";
	fout <<"\"\n";
	fout << "set style fill empty\n";
	fout << "plot '" << datFileName << "' using 1:5:4:8:7 with candlesticks title '"<< impl.getName() << "' whiskerbars,  '' using 1:6:6:6:6 with candlesticks lt -1 notitle\n";
	fout.close();
	if (datFileName) delete [] datFileName; datFileName = 0;
	if (   filename) delete []    filename;    filename = 0;
	if (    plotOut) delete []     plotOut;     plotOut = 0;
/* The box and whiskers plot
 *
 * There are a few different styles of box-and-whiskers plots, and the
 * gnuplot documentation gives a pretty good description.
 *
 * The two gotchas are what values you need and in what order. These
 * scripts are set up to use the following columns, in order:
 *
 * X value (for ordering left-right across the graph)
 * Min
 * 1st Quartile
 * Median
 * 3rd Quartile
 * Max
 * The other gotcha was wanting to have labels across the x-axis rather
 * than numbers. To accomplish this, add a 7th column with the label, then
 * use the following:
 *
 * set xrange[0:3]
 * set yrange[0:10]
 *
 * # Data columns: X Min 1stQuartile Median 3rdQuartile Max Titles
 * set bars 4.0
 * set style fill empty
 * plot 'data.csv' using 1:3:2:6:5:xticlabels(7) with candlesticks title 'Quartiles' whiskerbars, \
 *   ''         using 1:4:4:4:4 with candlesticks lt -1 notitle
 *   The xticlabels(7) is the key to the x-axis labels, saying to use the values from column 7 for the labels.
 *
 * */

}
// Display names for the nTimers timing metrics; the order must match the
// timings[] indices written by startTimings/stopTimings.
static const char * timerNames[]= {"ProcTime","UserTime","RealTime","ThreadTime", "MonoTime", "ClockTime"};
// Lazily registers one float metric per timer (seconds). Idempotent:
// already-created slots are left untouched.
void Benchmark::addTimerMetrics() {
	for (int i = 0; i < nTimers; i++) if (timeMetrics[i] == 0) {
		timeMetrics[i] = new Metric<double> (nRuns, "sec");	
		if (timeMetrics[i]) addMetric(*(timeMetrics[i]), timerNames[i]);
	}
}
// Snapshots all six clocks before a run; stopTimings() turns these into
// elapsed durations. Index order must match timerNames[] above.
void Benchmark::startTimings() {
	timings[0] = timer->getProcTime();
	timings[1] = timer->getUserTime();
	timings[2] = timer->getRealTime();
	timings[3] = timer->getThreadTime();
	timings[4] = timer->getMonoTime();
	timings[5] = timer->getClockTime();
}
// Converts the snapshots taken by startTimings() into elapsed times and
// feeds them into the timer metrics (if addTimerMetrics() was called).
// Also records memory/success samples when those metrics exist.
void Benchmark::stopTimings() {
	timings[0] = timer->getProcTime()  - timings[0];
	timings[1] = timer->getUserTime()  - timings[1];
	timings[2] = timer->getRealTime()  - timings[2];
	timings[3] = timer->getThreadTime()- timings[3];
	timings[4] = timer->getMonoTime()  - timings[4];
	timings[5] = timer->getClockTime() - timings[5];
	
	for (int i = 0; i < nTimers; i++) {
		if (timeMetrics[i] )	timeMetrics[i]->addMeasurement(timings[i]);
//		cout.precision(2);
//		cout << timerNames[i] << " " << timings[i] << " s" << endl;
	}
//	Metric<int> * mem = NULL;	getMetric(mem, "Dynamic Memory");
	if (memory )  memory->addMeasurement(impl.getMaxUsedMemory());
	if (success) success->addMeasurement(impl.computaionSuccessed());

}
/// True when every registered metric already has its .dat output file in
/// the benchmark directory (used to skip benchmarks whose results exist).
bool Benchmark::allDatFilesArePresent() const {
	const char * benchDir = UnitTester::instance()->getBenchmarkDirectory();
	const int total = nFloatMetrics + nMaxIntMetrics;
	for (int id = 0; id < total; id++) {
		const char * metricName = names->getName(id);
		if (metricName == NULL) continue; // unused metric slot
		const char * path = getMetricFileName(metricName, benchDir, ".dat");
		bool present = fexists(path);
		delete [] path;
		if (!present) return false;
	}
	return true;
}
// Executes the full benchmark: for each of nConfigurations problem sizes,
// performs nRuns timed repetitions, folds each metric's sample into its
// 7 summary statistics, then reports every metric to .dat/.gnuplot files
// and records the pass time.
void Benchmark::run() {
	// One statistics buffer (7 doubles per configuration) per metric;
	// int metrics occupy indices [0, nIntMetrics), float metrics follow.
	double ** results = new double* [getNMetrics()];
	if (results) {
		// ALLOCATE MEMORY
		for (int i = 0; i < getNMetrics(); i++) 
			results[i] = new double[ nConfigurations * 7];	
		for (int i = 0; i < nConfigurations; i++) {
			// Configuration is set once per size, or before every run when
			// the benchmark asked for per-run setup.
			if (!setConfigurationEachRun)    setRunConfiguration(i);
			for (int r = 0; r < nRuns; r++) {
				if (setConfigurationEachRun)  setRunConfiguration(i);
				prepareRun();
				startTimings();
				makeRun();
				stopTimings();
				finalizeRun();
			}
			// Collapse this configuration's samples into statistics and
			// clear the metrics for the next configuration.
			for (int j = 0; j < nIntMetrics; j++) {
				assert(intMetrics[j]->getSampleSize() == nRuns);
				intMetrics[j]->makeFinal();
				intMetrics[j]->storeMetrics(results[j] + i * 7);
				intMetrics[j]->reset();
			} 
			for (int j = 0; j < nFloatMetrics; j++) {
				assert(floatMetrics[j]->getSampleSize() == nRuns);
				floatMetrics[j]->makeFinal();
				floatMetrics[j]->storeMetrics(results[(j + nIntMetrics)] + i * 7);
				floatMetrics[j]->reset();
			} 
		}
	// REPORT RESULTS
		for (int i = 0; i < nIntMetrics; i++)
			reportResults(*intMetrics[i], names->getName(i), results[i]);
		// Float metric names live at IDs offset by nMaxIntMetrics (see
		// addMetric<double>), hence the different name index here.
		for (int i = 0; i < nFloatMetrics; i++) {
			double * res = results[(i + nIntMetrics)];
			reportResults( *floatMetrics[i], 
								names->getName(i + nMaxIntMetrics),	res);
		}
	
	// FREE MEMORY
		for (int i = 0; i < getNMetrics(); i++) {
			if (results[i]) delete[] results[i]; results[i] = 0;
		}
		delete [] results; results = 0;
	}
	cout << "Setting pass time" << endl;
	setPassedTime(timer->now());
	
	
};



// Self-test for the Metric class, registered via the UNITTEST macro.
UNITTEST(TestMetric);
TestMetric::TestMetric() {
	setName 				("Test Metric");
	setDescription  	("Test Metric for correct bahaviour & memory allocation");
	setTestAuthor		("Yordan Madzhunkov");
	setDeveloperAuthor("Yordan Madzhunkov");
	// Repeated here (on top of Test's ctor) so the timestamp reflects THIS
	// translation unit's compile time.
	setCompileTime     (__DATE__, __TIME__);
};
bool TestMetric::test() const {
	const int n = 9;
	Metric<double> m1(n);
	double vals[] = {  0, 0, 1, 2, 3, 13, 27, 61, 63 };
	for (int i = 0; i < n; i++) m1.addMeasurement(vals[i]);
	m1.makeFinal();
	bool res = true; 
	double tol = 1e-15;
	res &= fabs(m1.getFirstQuater () - vals[2]) < tol;
	res &= fabs(m1.getMedian      () - vals[4]) < tol;
	res &= fabs(m1.getThirdQuater () - vals[6]) < tol;
	int vals2[] = {  0, 0, 1, 2, 3, 13, 27, 61, 63 };
	Metric<int> m2(n);
	for (int i = 0; i < n; i++) m2.addMeasurement(vals2[i]);
	m2.makeFinal();
	res &= fabs(m2.getMin         () - vals[0]) < tol;
	res &= fabs(m2.getFirstQuater () - vals[2]) < tol;
	res &= fabs(m2.getMedian      () - vals[4]) < tol;
	res &= fabs(m2.getThirdQuater () - vals[6]) < tol;
	m2.reset();
	for (int i = 0; i < n-1; i++) m2.addMeasurement(vals2[i]);
	m2.makeFinal();
	res &= fabs(m2.getFirstQuater () - 0.75) < tol;
	res &= fabs(m2.getMedian      () - 2.5) < tol;
	res &= fabs(m2.getThirdQuater () - (13 * 3 + 27 * 1) * 0.25) < tol;
	return res;
}
// Global instance: its constructor registers the test with the UnitTester
// before main() runs.
TestMetric testMetric;

