#include "stdafx.h"

namespace classifier_test
{

// All build statistics start at zero; build() fills them in later.
classifier_tester::build_result::build_result()
{
	build_time = 0;
	data_count = 0;
}

// All test statistics start at zero; test_on() accumulates into them.
classifier_tester::test_result::test_result()
{
	test_time = 0;
	errors = 0;
	data_count = 0;
}

// Fraction of tested records that were misclassified.
// Returns 0 when no records have been tested yet, instead of the
// NaN produced by the 0.0/0 division in the unguarded form.
double classifier_tester::test_result::get_error_rate() const
{
	if (data_count == 0) return 0;
	return (double)errors / data_count;
}

// Registers this tester's persistent state with the serializer,
// one field per statement.
void classifier_tester::serialize(serializer& Serializer)
{
	Serializer("Scheme", Scheme);
	Serializer("BaseClassifier", BaseClassifier);
	Serializer("Classifiers", Classifiers);
}

// Read-only access to the record scheme used for building and testing.
const record_scheme& classifier_tester::get_scheme() const
{
	return this->Scheme;
}

void classifier_tester::set_scheme(const record_scheme& Val)
{
	Scheme = Val;
}

// Returns a non-owning pointer to the base classifier (null when none
// has been set).
classifier* classifier_tester::get_base_classifier() const
{
	// &*BaseClassifier dereferences the smart pointer, which is undefined
	// when it is null; .get() (used elsewhere in this file) returns the raw
	// pointer without dereferencing.
	return BaseClassifier.get();
}

// Stores a private clone of Val as the base classifier, so later mutation
// of the caller's object cannot affect this tester.
// A null Val is ignored instead of crashing on Val->clone().
void classifier_tester::set_base_classifier(classifier* Val)
{
	if (Val) BaseClassifier = Val->clone();
}

// Returns a non-owning view of the stored classifiers as raw pointers.
// NOTE(review): the view presumably stays valid only while Classifiers is
// not modified -- confirm subarray's lifetime semantics against its header.
subarray<stream_classifier* const> classifier_tester::get_classifiers() const
{
	return make_subarray(Classifiers).convert<stream_classifier*>();
}

// Replaces the classifier list with privately-owned clones of the
// classifiers in Val (deep copy).
void classifier_tester::set_classifiers(subarray<stream_classifier* const> Val)
{
	const int Count = Val.size();
	Classifiers.resize(Count);
	for (int Idx = 0; Idx < Count; Idx++)
		Classifiers[Idx] = Val[Idx]->clone();
}

// Removes all registered classifiers.
// NOTE(review): assumes reset_vector also releases the owned instances and
// their capacity -- confirm against reset_vector's definition.
void classifier_tester::clear_classifiers()
{
	reset_vector(Classifiers);
}

// Appends a privately-owned clone of Val to the classifier list.
// Precondition: Val must not be null (it is dereferenced unconditionally).
void classifier_tester::add_classifier(stream_classifier* Val)
{
	Classifiers.push_back(Val->clone());
}

// Discards SkipCount records from the stream, logging progress every
// 1000 records.
// NOTE(review): the stream's end is not checked here -- confirm that
// data_stream::read is safe to call past end-of-stream.
void classifier_tester::skip(data_stream* Stream, int SkipCount)
{
	logger::task Task = clogger()->info("Skipping records");
	stream_record Scratch;
	for (int Done = 0; Done < SkipCount; Done++)
	{
		if (Done % 1000 == 0)
			Task.progress(to_string(Done) + " records have been skipped");
		Stream->read(Scratch);
	}
}

// Builds every registered classifier on up to BuildCount records read from
// Stream, recording each classifier's build time and record count in
// BuildResults.
void classifier_tester::build(data_stream* Stream, int BuildCount)
{
	logger::task LTask = clogger()->info("Building classifiers");

	// Wire up domains before any data is read: the base classifier derives
	// its domain from the scheme, and each stream classifier mirrors that
	// domain and keeps a back-pointer to the base classifier.
	BaseClassifier->set_domain(Scheme.to_domain());
	for (int I = 0; I < (int)Classifiers.size(); I++)
	{
		Classifiers[I]->set_domain(BaseClassifier->get_domain());
		Classifiers[I]->set_base_classifier(&*BaseClassifier);
	}
	// One fresh (zeroed) result slot per classifier.
	BuildResults.clear();
	BuildResults.assign(Classifiers.size(), build_result());

	// Load the whole training window into memory once, then build every
	// classifier on the same records, timing each build separately.
	vector<smart_ptr<stream_record> > Data;
	read_records(Stream, Data, BuildCount, 0, BuildCount, LTask);
	clogger()->info("# of records: " + to_string(Data.size()));
	for (int I = 0; I < (int)Classifiers.size(); I++)
	{
		clogger()->info("Building " + to_string(I) + "th classifier");
		timer Timer;
		Timer.start();
		Classifiers[I]->build(make_subarray(Data).convert<const stream_record* const>());
		BuildResults[I].build_time = Timer.time();
		BuildResults[I].data_count = Data.size();
	}

	// Log all build times on a single line.
	string Msg;
	for (int I = 0; I < (int)Classifiers.size(); I++) Msg += to_string((float)BuildResults[I].build_time) + " ";
	clogger()->info("Build times(s): " + Msg);
}

// Tests every classifier on up to TestCount records from Stream; a negative
// TestCount means "until end of stream".  Records are processed in fixed-size
// chunks (via test_on) to bound memory use, and summary statistics are
// logged at the end.
void classifier_tester::test(data_stream* Stream, int TestCount)
{
	logger::task LTask = clogger()->info("Testing classifiers");

	// One fresh (zeroed) result slot per classifier.
	TestResults.clear();
	TestResults.assign(Classifiers.size(), test_result());

	int Count = 0;      // records processed so far
	int Chunk = 100000; // chunk size; the buffer below is reused every pass
	vector<smart_ptr<stream_record> > Data(Chunk);
	for (;;)
	{
		// Unlimited run: read full chunks until the stream is exhausted.
		// Bounded run: cap the final chunk so exactly TestCount records
		// are consumed in total.
		if (TestCount < 0) read_records(Stream, Data, Chunk, Count, TestCount, LTask);
		else read_records(Stream, Data, min(Chunk, TestCount - Count), Count, TestCount, LTask);
		if (Data.empty()) break; // stream exhausted or TestCount reached
		Count += Data.size();
		test_on(make_subarray(Data).convert<const stream_record* const>(), LTask);
	}
	LTask.progress(""); // clear the progress line

	clogger()->info("# of records: " + to_string(Count));
	string Msg;
	for (int I = 0; I < (int)Classifiers.size(); I++) Msg += to_string((float)TestResults[I].test_time) + " ";
	clogger()->info("Test times(s): " + Msg);
	Msg.clear();
	for (int I = 0; I < (int)Classifiers.size(); I++) Msg += to_string((float)TestResults[I].get_error_rate()) + " ";
	clogger()->info("Error rates: " + Msg);
}

// Per-classifier statistics gathered by the most recent build() run.
const vector<classifier_tester::build_result>& classifier_tester::get_build_results() const
{
	return this->BuildResults;
}

// Per-classifier statistics gathered by the most recent test() run.
const vector<classifier_tester::test_result>& classifier_tester::get_test_results() const
{
	return this->TestResults;
}

// Writes one line per tested record for the given classifier index: the
// predicted class label followed by whether the prediction was correct.
// Precondition: test() must have populated TestResults[Classifier].
void classifier_tester::write_classifications(FILE* File, int Classifier) const
{
	const test_result& Result = TestResults[Classifier];
	for (int No = 0; No < Result.data_count; No++)
	{
		checked_fprintf(File, "%s,", Scheme.back().discr.get_value(Result.classifications[No]).c_str());
		checked_fprintf(File, Result.corrects[No] ? "correct.\n" : "incorrect.\n");
	}
}

// Runs one chunk of records through every classifier: each record is first
// classified and then used to train that same classifier (test-then-train),
// so every prediction is made on data the classifier has not yet seen.
// Accumulates timing, error counts, and per-record outcomes into TestResults.
void classifier_tester::test_on(subarray<const stream_record* const> Data, logger::task& LTask)
{
	string Msg = LTask.get_progress(); // keep the caller's progress text as a prefix
	vector<discr_value> Classifications(Data.size());
	vector<bool> Corrects(Data.size());
	for (int I = 0; I < (int)Classifiers.size(); I++)
	{
		LTask.progress(Msg + ", testing " + to_string(I) + "th classifier");
		int Errors = 0;
		timer Timer;
		Timer.start();
		for (int No = 0; No < Data.size(); No++)
		{
			discr_value Class = Classifiers[I]->classify(*Data[No]);
			Classifications[No] = Class;
			// Ground truth is the discrete value of the record's last field.
			bool Correct = Class == Data[No]->back().discr;
			if (!Correct) Errors++;
			Corrects[No] = Correct;
			// Train only after classifying (prequential evaluation order).
			Classifiers[I]->train(*Data[No]);
		}
		// Note: the recorded time covers classification AND training.
		TestResults[I].test_time += Timer.time();
		TestResults[I].errors += Errors;
		TestResults[I].data_count += Data.size();
		// Keep per-record outcomes for write_classifications().
		TestResults[I].classifications.insert(TestResults[I].classifications.end(), Classifications.begin(), Classifications.end());
		TestResults[I].corrects.insert(TestResults[I].corrects.end(), Corrects.begin(), Corrects.end());
	}
}

// Reads up to MaxCount records (MaxCount < 0: until end of stream) from
// Stream into Data, reusing already-allocated slots where possible, and
// truncates Data to the number of records actually read.  LogStart/LogEnd
// only affect the progress messages (they let callers report global totals
// across repeated chunked calls).
void classifier_tester::read_records(data_stream* Stream, vector<smart_ptr<stream_record> >& Data, int MaxCount, int LogStart, int LogEnd, logger::task& LTask)
{
	if (MaxCount >= 0) Data.reserve(MaxCount);
	int Count = 0;
	while ((MaxCount < 0 || Count < MaxCount) && !Stream->eof())
	{
		if (Count % 1000 == 0)
			LTask.progress(to_string(LogStart + Count) + "/" + to_string(LogEnd) + " records have been read");
		// Grow the buffer on demand; re-allocate any slot that was left null.
		if ((int)Data.size() <= Count) Data.push_back(make_smart_ptr(new stream_record));
		if (Data[Count].get() == 0) Data[Count].reset(new stream_record);
		Stream->read(*Data[Count++]);
	}
	// Drop unused tail slots so Data.size() equals the number of records read.
	Data.erase(Data.begin() + Count, Data.end());
}

}
