#include "stdafx.h"

namespace classifier_test
{

namespace high_drift
{

// Reset the filter: zero the running estimate and derive the equivalent
// sample count N from the configured decay rate (smaller decay => longer
// effective memory).
void kalman_filter::init()
{
	assert(DecayRate > 0 && DecayRate <= 1);
	X = 0;
	N = (1 - DecayRate) / DecayRate;
}

// Fold one observation Z into the estimate as an exponentially weighted
// running mean: the previous estimate carries weight N, the new sample 1.
void kalman_filter::measure(double Z)
{
	double Weighted = X * N + Z;
	X = Weighted / (N + 1);
}

// Smoothed prediction of the next observation; the +1/+2 terms pull the
// estimate toward 0.5 when little evidence has accumulated (Laplace-style
// smoothing for the 0/1 hit stream this filter is fed).
double kalman_filter::guess() const
{
	double Numer = X * N + 1;
	double Denom = N + 2;
	return Numer / Denom;
}

// Default constructor. Initializes BuildChunkSize to the documented default
// of 200 (see initialize_options/apply_arguments); previously it was left
// uninitialized, so a classifier built without apply_arguments() or
// set_build_chunk_size() read an indeterminate value in compute_chunks().
high_drift_classifier::high_drift_classifier()
{
	BuildChunkSize = 200;
}

// Copy constructor: copy the base classifier state and the chunk size.
high_drift_classifier::high_drift_classifier(const high_drift_classifier& Val)
	: stream_classifier(Val), BuildChunkSize(Val.BuildChunkSize)
{
}

// Polymorphic copy, implemented via the copy constructor.
smart_ptr<stream_classifier> high_drift_classifier::clone() const
{
	high_drift_classifier* Copy = new high_drift_classifier(*this);
	return make_smart_ptr(Copy);
}

// Accessor: number of records that make up one training chunk.
int high_drift_classifier::get_build_chunk_size() const
{
	const int Size = BuildChunkSize;
	return Size;
}

void high_drift_classifier::set_build_chunk_size(int Val)
{
	BuildChunkSize = Val;
}

// Build the ensemble from a batch of training records:
// 1) partition the data into fixed-size chunks with per-chunk classifiers,
// 2) cluster the chunks into concepts by classifier similarity,
// 3) train one classifier per concept on its chunks' records,
// 4) tune each concept's decay-rate filter on the full stream.
// TrData keeps a view of Data for the duration of the build steps.
void high_drift_classifier::build(subarray<const stream_record* const> Data)
{
	logger::task LTask = clogger()->fine("Building highdrift classifier");
	TrData = Data;
	compute_chunks();
	compute_concepts();
	compute_concept_classifiers();
	compute_filters();
	// Chunks are only needed during construction; release them now.
	reset_vector(Chunks);
}

void high_drift_classifier::train(const stream_record& Record)
{
	assert(Domain.valid(Record));
	assert(!Concepts.empty());
	for (int I = 0; I < (int)Concepts.size(); I++)
	{
		int Class = Concepts[I]->Classifier->classify(Record);
		bool Hit = Class == Record.back().discr;
		Concepts[I]->Filter.measure(Hit);
		ConAccus[I] = Concepts[I]->Filter.guess();
	}
}

// Delegate estimation to the classifier of the currently most accurate
// concept (arg-max over the cached accuracy estimates).
void high_drift_classifier::estimate(const stream_record& Record, array<double>& Probs) const
{
	int Best = (int)(max_element(ConAccus.begin(), ConAccus.end()) - ConAccus.begin());
	Concepts[Best]->Classifier->estimate(Record, Probs);
}

// Number of concepts mined during build() (size_t narrowed to int, as in
// the original interface).
int high_drift_classifier::get_concept_count() const
{
	const int Count = (int)Concepts.size();
	return Count;
}

// Read-only view of the per-concept accuracy estimates; refreshed on every
// train() call and seeded by compute_filters() during build().
const array<double>& high_drift_classifier::get_concept_accuracies() const
{
	return ConAccus;
}

// Register command-line options on top of the base classifier's set;
// "chunk-size"/"cz" controls the number of records per training chunk
// (default 200 — kept in sync with apply_arguments()).
void high_drift_classifier::initialize_options(command_line_options& Options) const
{
	stream_classifier::initialize_options(Options);
	Options.add("chunk-size").add_name("cz").set_argument("<count>").set_info("# of records in a chunk, default 200");
}

// Apply parsed command-line arguments after the base class has taken its
// share; falls back to the documented default of 200 records per chunk when
// "cz" was not supplied.
void high_drift_classifier::apply_arguments(const command_line_arguments& Args)
{
	stream_classifier::apply_arguments(Args);
	BuildChunkSize = to_int(Args["cz"].get_or_default("200"));
}

void high_drift_classifier::compute_chunks()
{
	logger::task LTask = clogger()->fine("Partitioning training data into chunks and building base classifiers");
	Chunks.clear();
	const stream_record*const* Ptr = TrData.begin();
	while (TrData.end() - Ptr >= BuildChunkSize)
	{
		smart_ptr<chunk> Chunk(new chunk);
		Chunk->Data = make_subarray(Ptr, BuildChunkSize).convert<record*>();
		Ptr += BuildChunkSize;
		Chunk->Classifier = BaseClassifier->clone();
		Chunk->Classifier->train(Chunk->Data);
		Chunks.push_back(Chunk);
	}
	assert(!Chunks.empty());
	clogger()->fine("# of chunks: " + to_string(Chunks.size()));
}

// Fill Matrix with pairwise similarities between the chunk classifiers,
// measured by how often they agree on a common probe sample (at most 1000
// randomly chosen training records). The result is symmetric with a unit
// diagonal.
void high_drift_classifier::compute_chunk_similarities(vector<vector<float> >& Matrix) const
{
	logger::task LTask = clogger()->fine("Computing chunk similarities");
	vector<const record*> Data(TrData.begin(), TrData.end());
	if (Data.size() > 1000)
	{
		shuffle_n(Data.begin(), Data.begin() + 1000, Data.end());
		Data.erase(Data.begin() + 1000, Data.end());
	}
	const int Count = (int)Chunks.size();
	vector<vector<discr_value> > ClassifyResults(Count);
	for (int I = 0; I < Count; I++)
	{
		classification_results(&*Chunks[I]->Classifier, Data, ClassifyResults[I]);
		// The per-chunk classifier is no longer needed once its answers
		// are recorded; free it early to bound memory use.
		Chunks[I]->Classifier.reset();
	}
	Matrix.assign(Count, vector<float>(Count));
	for (int I = 0; I < Count; I++)
	{
		Matrix[I][I] = 1;
		for (int J = I + 1; J < Count; J++)
		{
			float Sim = (float)classification_similarity(ClassifyResults[I], ClassifyResults[J]);
			Matrix[I][J] = Sim;
			Matrix[J][I] = Sim;
		}
	}
}

// Cluster the chunks into concepts: chunks whose classifiers agree on the
// probe sample are likely drawn from the same underlying concept, so we run
// modularity clustering on the similarity graph and create one concept per
// cluster, tagging each chunk with its concept.
void high_drift_classifier::compute_concepts()
{
	logger::task LTask = clogger()->fine("Mining concepts");
	vector<vector<float> > SimMatrix;
	compute_chunk_similarities(SimMatrix);
	modularity_clustering P;
	P.set_size(Chunks.size());
	P.set_division_threshold(0.005);
	for (int I = 0; I < (int)Chunks.size(); I++)
		for (int J = I + 1; J < (int)Chunks.size(); J++)
			P.add_edge(I, J, SimMatrix[I][J]);
	reset_vector(SimMatrix);
	P.compute(true);

	// Create all concepts first, then label each chunk in a single pass by
	// indexing the concept list with the chunk's cluster label. The old
	// code rescanned every chunk once per cluster — O(#clusters * #chunks).
	Concepts.clear();
	for (int I = 0; I < P.get_cluster_count(); I++)
	{
		smart_ptr<concept> Concept(new concept);
		Concepts.push_back(Concept);
	}
	for (int J = 0; J < (int)Chunks.size(); J++)
	{
		int Label = P.get_labels()[J];
		// Same effect as before: labels outside [0, cluster_count) leave
		// the chunk's concept untouched.
		if (Label >= 0 && Label < (int)Concepts.size())
			Chunks[J]->Concept = &*Concepts[Label];
	}
	clogger()->fine("# of concepts: " + to_string(Concepts.size()));
}

void high_drift_classifier::compute_concept_classifiers()
{
	logger::task LTask = clogger()->fine("Building concepts' classifiers");
	for (int I = 0; I < (int)Concepts.size(); I++)
	{
		vector<const record*> Data;
		for (int J = 0; J < (int)Chunks.size(); J++)
			if (Chunks[J]->Concept == &*Concepts[I])
				Data.insert(Data.end(), Chunks[J]->Data.begin(), Chunks[J]->Data.end());
		Concepts[I]->Classifier = BaseClassifier->clone();
		Concepts[I]->Classifier->train(Data);
	}
}

void high_drift_classifier::compute_filters()
{
	logger::task LTask = clogger()->fine("Tuning decay rates");
	ConAccus.resize(Concepts.size());
	for (int I = 0; I < (int)Concepts.size(); I++)
	{
		tune_filter(&*Concepts[I]);
		ConAccus[I] = Concepts[I]->Filter.guess();
	}
}

// Tune one concept's decay rate: precompute, per training record, whether
// the concept's classifier predicts its class correctly (1) or not (0),
// then search the (0, 1] range for the rate that best predicts that stream.
void high_drift_classifier::tune_filter(concept* Concept)
{
	vector<char> Hits(TrData.size());
	for (int Idx = 0; Idx < TrData.size(); Idx++)
	{
		const stream_record* Rec = TrData[Idx];
		Hits[Idx] = Concept->Classifier->classify(*Rec) == Rec->back().discr;
	}
	// Start the search in the middle of the range with a step of 0.25.
	Concept->Filter.DecayRate = 0.5;
	tune_filter_param(Concept, Hits, 0.25);
	clogger()->fine("Decay rate: " + to_string(Concept->Filter.DecayRate));
}

// Coordinate-descent search for the decay rate: at each step probe
// Best + Delta and, only if that does not improve, Best - Delta; keep
// whichever probe lowers the squared prediction error, then halve Delta
// until it drops below 0.0001.
void high_drift_classifier::tune_filter_param(concept* Concept, subarray<const char> Hits, double Delta)
{
	double BestError = error_of_filter(Concept, Hits);
	double Best = Concept->Filter.DecayRate;
	for (; Delta >= 0.0001; Delta *= 0.5)
	{
		Concept->Filter.DecayRate = Best + Delta;
		double Probe = error_of_filter(Concept, Hits);
		if (Probe < BestError)
		{
			Best = Concept->Filter.DecayRate;
			BestError = Probe;
			continue;
		}
		Concept->Filter.DecayRate = Best - Delta;
		Probe = error_of_filter(Concept, Hits);
		if (Probe < BestError)
		{
			Best = Concept->Filter.DecayRate;
			BestError = Probe;
		}
	}
	// Restore the best rate and replay the stream once more so the
	// filter's internal state matches the chosen rate.
	Concept->Filter.DecayRate = Best;
	error_of_filter(Concept, Hits);
}

// Replay the precomputed hit/miss stream through a freshly initialized
// filter and return the accumulated squared one-step-ahead prediction
// error. Side effect: leaves the filter in its end-of-stream state.
double high_drift_classifier::error_of_filter(concept* Concept, subarray<const char> Hits)
{
	Concept->Filter.init();
	double Total = 0;
	for (int Idx = 0; Idx < TrData.size(); Idx++)
	{
		double Residual = Hits[Idx] - Concept->Filter.guess();
		Total += Residual * Residual;
		Concept->Filter.measure(Hits[Idx]);
	}
	return Total;
}

}

}
