#include "stdafx.h"

namespace classifier_test
{

namespace high_order
{

// Default constructor: nothing is initialized at this level.
// NOTE(review): assumes concept_cleaner declares no data members — confirm against the header.
concept_cleaner::concept_cleaner()
{
}

// Copy constructor: the body copies nothing at this level.
// NOTE(review): assumes concept_cleaner declares no data members — confirm against the header.
concept_cleaner::concept_cleaner(const concept_cleaner& Val)
{
}

// Base serialization hook: serializes no fields itself; derived classes
// (see high_order_concept_cleaner::serialize below) call this before
// serializing their own state.
void concept_cleaner::serialize(serializer& Serializer)
{
}

// Base hook for registering command-line options: adds none itself;
// derived classes extend this.
void concept_cleaner::initialize_options(command_line_options& Options) const
{
}

// Base hook for consuming parsed command-line arguments: reads none itself;
// derived classes extend this.
void concept_cleaner::apply_arguments(const command_line_arguments& Args)
{
}

// Default-construct with the standard error-growth threshold of 0.1
// (matching the "--error" option's documented default).
high_order_concept_cleaner::high_order_concept_cleaner()
	: ErrThreshold(0.1)
{
}

// Copy constructor: copies the configured threshold only.
// NOTE(review): Chunks/Concepts working state is not copied — presumably
// deliberate, since they are rebuilt via set_* before compute(); confirm.
high_order_concept_cleaner::high_order_concept_cleaner(const high_order_concept_cleaner& Val)
	: concept_cleaner(Val)
	, ErrThreshold(Val.ErrThreshold)
{
}

// Produce an owning copy of this cleaner (the copy constructor carries over
// ErrThreshold but not the per-run Chunks/Concepts state).
smart_ptr<concept_cleaner> high_order_concept_cleaner::clone() const
{
	high_order_concept_cleaner* Copy = new high_order_concept_cleaner(*this);
	return make_smart_ptr(Copy);
}

// Serializes base-class state first, then this class's threshold under the
// key "ErrThreshold". Key names and ordering are part of the on-disk format —
// do not change them.
void high_order_concept_cleaner::serialize(serializer& Serializer)
{
	concept_cleaner::serialize(Serializer);
	Serializer("ErrThreshold", ErrThreshold);
}

// Registers this class's command-line options on top of the base class's.
// Adds "--error <value>": the allowed relative increase of classification
// error while cleaning concepts (read back in apply_arguments()).
void high_order_concept_cleaner::initialize_options(command_line_options& Options) const
{
	concept_cleaner::initialize_options(Options);
	// Fixed typo in the user-visible help text: "Alowed" -> "Allowed".
	Options.add("error").set_argument("<value>").set_info("Allowed increment rate of classification error while cleaning concepts, default 0.1");
}

void high_order_concept_cleaner::apply_arguments(const command_line_arguments& Args)
{
	concept_cleaner::apply_arguments(Args);
	ErrThreshold = to_double(Args["error"].get_or_default("0.1"));
}

// Returns the allowed relative error-growth threshold used by clean().
double high_order_concept_cleaner::get_error_threshold() const
{
	return ErrThreshold;
}

// Sets the allowed relative error-growth threshold used by clean().
void high_order_concept_cleaner::set_error_threshold(double Val)
{
	ErrThreshold = Val;
}

// Pre-sizes the chunk table; individual entries are then filled via set_chunk().
void high_order_concept_cleaner::set_chunk_count(int Val)
{
	Chunks.resize(Val);
}

void high_order_concept_cleaner::set_chunk(int I, int Begin, int End, int Concept)
{
	Chunks[I].begin = Begin;
	Chunks[I].end = End;
	Chunks[I].concept = Concept;
}

// Pre-sizes the concept table; individual entries are then filled via set_concept().
void high_order_concept_cleaner::set_concept_count(int Val)
{
	Concepts.resize(Val);
}

void high_order_concept_cleaner::set_concept(int I, smart_ptr<validation_dataset> VDataset, smart_ptr<validation_classifier> VClassifier, double VError)
{
	Concepts[I].reset(new my_concept);
	Concepts[I]->vdataset = VDataset;
	Concepts[I]->vclassifier = VClassifier;
	Concepts[I]->verror = VError;
}

// Runs the full cleaning pass: resolves chunk->concept links to pointers,
// merges redundant concepts, renumbers the survivors, and coalesces
// adjacent chunks that ended up on the same concept.
void high_order_concept_cleaner::compute()
{
	logger::task LTask = clogger()->fine("Cleaning concepts");
	// Translate stored concept indices into direct pointers so clean() can
	// erase entries from Concepts without breaking the chunks' references.
	for (int I = 0; I < (int)Chunks.size(); I++) Chunks[I].concept_ptr = &*Concepts[Chunks[I].concept];
	clean();
	// Re-number the surviving concepts...
	for (int I = 0; I < (int)Concepts.size(); I++) Concepts[I]->index = I;
	// ...and map each chunk's pointer back to the new index.
	for (int I = 0; I < (int)Chunks.size(); I++) Chunks[I].concept = Chunks[I].concept_ptr->index;
	group_chunks_by_concepts();
	clogger()->fine("# of concepts after cleaning: " + to_string(Concepts.size()));
}

// Returns the number of chunk records currently held.
int high_order_concept_cleaner::get_chunk_count() const
{
	// Explicit cast matches the "(int)Chunks.size()" convention used
	// throughout this file and removes the silent size_t -> int narrowing.
	return (int)Chunks.size();
}

// Returns a read-only view of the I-th chunk (no bounds checking — I must
// be within [0, get_chunk_count())).
const concept_cleaner::chunk& high_order_concept_cleaner::get_chunk(int I) const
{
	return Chunks[I];
}

// Returns the number of concept records currently held.
int high_order_concept_cleaner::get_concept_count() const
{
	// Explicit cast matches the "(int)Concepts.size()" convention used
	// throughout this file and removes the silent size_t -> int narrowing.
	return (int)Concepts.size();
}

// Returns a read-only view of the I-th concept (no bounds checking — I must
// be within [0, get_concept_count())).
const concept_cleaner::concept& high_order_concept_cleaner::get_concept(int I) const
{
	return *Concepts[I];
}

// Greedy concept merging: visits concepts ordered by how many samples they
// classify correctly (ascending, i.e. cheapest-to-lose first) and merges
// each into the foreign concept whose classifier performs best on its
// validation data, as long as the running error total stays within
// ErrThreshold (relative) of the best total seen so far.
void high_order_concept_cleaner::clean()
{
	// Total weighted validation errors over all concepts before cleaning.
	double InitErrors = 0;
	// Sort key: dataset size * accuracy = number of correctly classified samples.
	vector<pair<double, my_concept*> > List;
	for (int I = 0; I < (int)Concepts.size(); I++)
	{
		List.push_back(make_pair(Concepts[I]->vdataset->size() * (1 - Concepts[I]->verror), &*Concepts[I]));
		InitErrors += Concepts[I]->verror * Concepts[I]->vdataset->size();
	}
	double BestErrors = InitErrors;
	sort(List.begin(), List.end());
	double CurrErrors = BestErrors;
	for (int I = 0; I < (int)List.size(); I++)
	{
		my_concept* Con = List[I].second;
		// Find the foreign classifier with the lowest error on Con's data.
		double MinErr = numeric_limits<double>::max();
		my_concept* BestCon = 0;
		for (int J = 0; J < (int)Concepts.size(); J++) if (&*Concepts[J] != Con)
		{
			double Err = Concepts[J]->vclassifier->test_error(&*Con->vdataset);
			if (Err >= MinErr) continue;
			MinErr = Err;
			BestCon = &*Concepts[J];
		}
		// Projected error total if Con's samples were handled by BestCon instead.
		double Errors = CurrErrors - Con->verror * Con->vdataset->size() + MinErr * Con->vdataset->size();
		// Skip the merge when it would push errors beyond the allowed margin.
		// NOTE(review): when only one concept remains, MinErr stays at
		// numeric_limits<double>::max() and BestCon stays null; this check then
		// appears to be the only thing preventing replace_concept(Con, null) — confirm.
		if (Errors > BestErrors * (1 + ErrThreshold)) continue;
		if (Errors < BestErrors) BestErrors = Errors;
		CurrErrors = Errors;
		// replace_concept redirects Con's chunks to BestCon; remove_concept
		// then erases Con from Concepts. Con must not be used afterwards;
		// each List entry is visited exactly once, so it is not.
		replace_concept(Con, BestCon, MinErr);
		remove_concept(Con);
	}
	clogger()->fine("Initial, best and current errors: " + to_string(InitErrors) + " " + to_string(BestErrors) + " " + to_string(CurrErrors));
}

void high_order_concept_cleaner::group_chunks_by_concepts()
{
	vector<my_chunk> OldChunks = Chunks;
	Chunks.clear();
	for (int I = 0; I < (int)OldChunks.size();)
	{
		int J = I + 1;
		while (J < (int)OldChunks.size() && OldChunks[J].concept == OldChunks[I].concept) J++;
		Chunks.push_back(OldChunks[I]);
		Chunks.back().end = OldChunks[J - 1].end;
		I = J;
	}
}

// Merges concept Old into New: chunks pointing at Old are redirected to New,
// New's validation error becomes the sample-count-weighted average of both
// errors, and Old's validation dataset is appended to New's.
// NOTE(review): parameter Err (the measured cross-test error from clean())
// is unused here even though clean()'s error accounting is based on it —
// confirm whether the merged verror should incorporate Err instead.
void high_order_concept_cleaner::replace_concept(my_concept* Old, my_concept* New, double Err)
{
	for (int I = 0; I < (int)Chunks.size(); I++)
		if (Chunks[I].concept_ptr == Old) Chunks[I].concept_ptr = New;
	// The weighted average must be computed BEFORE the datasets are merged,
	// since the weights are the pre-merge dataset sizes.
	New->verror =
		(New->verror * New->vdataset->size() + Old->verror * Old->vdataset->size())
		/ (New->vdataset->size() + Old->vdataset->size());
	New->vdataset->add(&*Old->vdataset);
}

// Erases the entry in Concepts that owns Con. Presumably this destroys *Con
// (smart_ptr ownership — confirm), so callers must not touch Con afterwards.
// Asserts if Con is not present, which would indicate a caller bug.
void high_order_concept_cleaner::remove_concept(my_concept* Con)
{
	for (int I = 0; I < (int)Concepts.size(); I++)
	{
		if (&*Concepts[I] != Con) continue;
		Concepts.erase(Concepts.begin() + I);
		return;
	}
	// Fixed misleading indentation: this assert was indented as if it were
	// inside the loop, but it only runs when Con was not found.
	assert(false);
}

}

}
