#include "stdafx.h"

namespace classifier_test
{

namespace high_order
{

// Serializes the per-concept state (the trained classifier and its
// validation error) through the project's chained serializer API.
// NOTE(review): "concept" is a reserved keyword since C++20 — this type
// would need renaming if the project ever moves to that standard.
void stream_predictor::concept::serialize(serializer& Serializer)
{
	Serializer
		("classifier", classifier)
		("verror", verror);
}

// Default constructor.
// Fixed: ClassCount (a plain int) was left uninitialized here, while the
// copy constructor, serialize() and set_domain() all maintain it — a
// default-constructed predictor carried garbage until set_domain ran.
stream_predictor::stream_predictor()
{
	ClassCount = 0;
}

// Copy constructor: copies the domain description and class count, and
// deep-copies the base classifier prototype (via clone()) when the
// source actually owns one.
stream_predictor::stream_predictor(const stream_predictor& Val)
{
	ClassCount = Val.ClassCount;
	Domain = Val.Domain;
	if (Val.BaseClassifier.get())
		BaseClassifier = Val.BaseClassifier->clone();
}

// Serializes the domain and the base classifier prototype. After the
// serializer runs (load or save), ClassCount is rederived from the
// final (class) attribute of the domain so a freshly loaded predictor
// is immediately consistent.
// NOTE(review): Domain.back() on an empty domain is undefined — assumes
// a serialized predictor always carries a non-empty domain; confirm.
void stream_predictor::serialize(serializer& Serializer)
{
	Serializer
		("Domain", Domain)
		("BaseClassifier", BaseClassifier);
	ClassCount = Domain.back().discr.count;
}

// Command-line integration hook: this predictor declares no options.
void stream_predictor::initialize_options(command_line_options& Options) const
{
}

// Command-line integration hook: nothing to apply (no options declared).
void stream_predictor::apply_arguments(const command_line_arguments& Args)
{
}

// Returns the record domain the predictor was configured with.
const record_domain& stream_predictor::get_domain() const
{
	return Domain;
}

void stream_predictor::set_domain(const record_domain& Val)
{
	Domain = Val;
	ClassCount = Domain.back().discr.count;
}

// Returns a non-owning pointer to the base classifier prototype, or null
// when none has been set.
// Fixed: the original returned &*BaseClassifier, which dereferences the
// smart pointer first — undefined behaviour when it is null. get()
// yields the raw pointer (possibly null) without a dereference.
classifier* stream_predictor::get_base_classifier() const
{
	return BaseClassifier.get();
}

// Installs a deep copy of Val as the base classifier prototype; the
// caller keeps ownership of Val itself.
// NOTE(review): Val is dereferenced unconditionally — a null argument
// crashes here; confirm callers never pass null.
void stream_predictor::set_base_classifier(classifier* Val)
{
	BaseClassifier = Val->clone();
}

// Serializes the base concept state plus the transition statistics this
// subclass adds: chunk occurrence count, total record count, and the
// per-record probability that a chunk of this concept ends.
void high_order_stream_predictor::my_concept::serialize(serializer& Serializer)
{
	concept::serialize(Serializer);
	Serializer
		("occur_freq", occur_freq)
		("length", length)
		("end_prob", end_prob);
}


// Default constructor; all setup happens via the set_* methods and compute().
high_order_stream_predictor::high_order_stream_predictor()
{
}

// Copy constructor: base-class part (domain, class count, deep-copied
// base classifier) is handled by stream_predictor's copy constructor.
// NOTE(review): Concepts/StateProbs/Chunks/Stream rely on their members'
// own copy semantics via the implicit member-wise copy — confirm the
// per-concept classifier smart_ptrs copy as intended.
high_order_stream_predictor::high_order_stream_predictor(const high_order_stream_predictor& Val)
	: stream_predictor(Val)
{
}

// Produces an independent deep copy of this predictor, returned through
// the stream_predictor interface.
smart_ptr<stream_predictor> high_order_stream_predictor::clone() const
{
	high_order_stream_predictor* Copy = new high_order_stream_predictor(*this);
	return make_smart_ptr(Copy);
}

// Serializes the base predictor state plus the trained concepts and the
// current state (concept) probability distribution. Chunks and Stream
// are setup-time data and are deliberately not persisted.
void high_order_stream_predictor::serialize(serializer& Serializer)
{
	stream_predictor::serialize(Serializer);
	Serializer
		("Concepts", Concepts)
		("StateProbs", StateProbs);
}

// Stores the training stream. Stream is a non-owning view over the
// caller's record pointers — the underlying data must stay alive until
// compute() has run.
void high_order_stream_predictor::set_stream(subarray<const record* const> Data)
{
	Stream = Data;
}

// Pre-sizes the chunk table; individual entries are filled by set_chunk().
void high_order_stream_predictor::set_chunk_count(int Val)
{
	Chunks.resize(Val);
}

// Records chunk I: the half-open stream range [Begin, End) and the index
// of the concept the chunk is assigned to.
void high_order_stream_predictor::set_chunk(int I, int Begin, int End, int Concept)
{
	Chunks[I].concept = Concept;
	Chunks[I].end = End;
	Chunks[I].begin = Begin;
}

// Pre-sizes the concept table; individual entries are filled by set_concept().
void high_order_stream_predictor::set_concept_count(int Val)
{
	Concepts.resize(Val);
}

// Trains the classifier for concept I on every stream record belonging
// to a chunk assigned to that concept, and records its validation error.
// NOTE(review): VClassifier is never used, and VDataset only supplies a
// size hint for reserve() (which may differ from the number of records
// actually gathered below) — confirm both are intentional.
void high_order_stream_predictor::set_concept(int I, smart_ptr<validation_dataset> VDataset, smart_ptr<validation_classifier> VClassifier, double VError)
{
	Concepts[I].verror = VError;
	// Gather all records from chunks labelled with concept I.
	vector<const record*> Data;
	Data.reserve(VDataset->size());
	for (int J = 0; J < (int)Chunks.size(); J++)
		if (Chunks[J].concept == I)
			Data.insert(Data.end(), Stream.begin() + Chunks[J].begin, Stream.begin() + Chunks[J].end);
	// Train a fresh copy of the base classifier prototype on that data.
	Concepts[I].classifier = BaseClassifier->clone();
	Concepts[I].classifier->train(Data);
	Concepts[I].length = Data.size();
}

// Finalizes training once all chunks and concepts are set: derives the
// transition statistics, resets the state distribution, replays the
// whole stream through the on-line update, then drops the chunk
// bookkeeping, which is only needed during setup.
void high_order_stream_predictor::compute()
{
	compute_transitions();
	// NOTE(review): presumably normalize_probabilities maps an all-zero
	// vector to the uniform distribution — confirm, otherwise this
	// normalizes against a zero sum.
	StateProbs.assign(Concepts.size(), 0);
	normalize_probabilities(StateProbs);
	// Warm up the state probabilities on the stored stream.
	for (int I = 0; I < Stream.size(); I++) train(*Stream[I]);
	reset_vector(Chunks);
}

// On-line update for one labelled record: first the measurement step
// (reweight states by how well each concept explains the record), then
// the time step (apply the concept-transition model).
void high_order_stream_predictor::train(const record& Record)
{
	update_states_by_data(Record);
	update_states_by_time();
}

// Classifies Record as the most probable class under the mixture of
// concept classifiers weighted by the state probabilities. Concepts are
// visited in decreasing order of state probability so the loop can stop
// early: once the unvisited probability mass cannot lift any class above
// the current leader, the answer is final and the remaining (expensive)
// classifier calls are skipped.
classifier_test::discr_value high_order_stream_predictor::classify(const stream_record& Record) const
{
	// Pair each concept index with its state probability; sort descending.
	vector<pair<double, int> > ConList(Concepts.size());
	for (int I = 0; I < (int)Concepts.size(); I++)
		ConList[I] = make_pair(StateProbs[I], I);
	sort(ConList.begin(), ConList.end(), greater<pair<double, int> >());
	array<double> Probs(ClassCount, 0);
	array<double> P;
	double RemainProb = 1;	// state probability mass not yet accumulated
	int CurrClass = 0;
	for (int I = 0; I < (int)ConList.size(); I++)
	{
		// Add this concept's class estimates, weighted by its state prob.
		Concepts[ConList[I].second].classifier->estimate(Record, P);
		for (int J = 0; J < ClassCount; J++) Probs[J] += ConList[I].first * P[J];
		// Track the running argmax (entries of Probs only ever grow).
		for (int J = 0; J < ClassCount; J++)
			if (Probs[J] > Probs[CurrClass]) CurrClass = J;
		RemainProb -= ConList[I].first;
		// Early exit: even giving all remaining mass to a rival class
		// could not overtake the leader.
		bool Done = true;
		for (int J = 0; J < ClassCount; J++)
			if (J != CurrClass && Probs[J] + RemainProb > Probs[CurrClass])
				Done = false;
		if (Done) break;
	}
	return CurrClass;
}

void high_order_stream_predictor::estimate(const stream_record& Record, array<double>& Probs) const
{
	Probs.assign(ClassCount, 0);
	array<double> P;
	for (int I = 0; I < (int)Concepts.size(); I++)
	{
		Concepts[I].classifier->estimate(Record, P);
		for (int J = 0; J < ClassCount; J++) Probs[J] += StateProbs[I] * P[J];
	}
	normalize_probabilities(Probs);
}

// Number of concepts currently modelled.
int high_order_stream_predictor::get_concept_count() const
{
	return (int)Concepts.size();
}

// Returns the current state (concept) probability distribution.
const array<double>& high_order_stream_predictor::get_state_probabilities() const
{
	return StateProbs;
}

// Derives per-concept transition statistics from the chunk assignment:
// occur_freq counts how many chunks each concept owns, and end_prob is
// the per-record probability that a chunk of the concept ends (chunk
// occurrences divided by the concept's total record count). Also logs
// concept sizes and validation errors.
// NOTE(review): divides by Concepts[I].length — a concept that gathered
// no records would divide by zero; confirm every concept owns data.
void high_order_stream_predictor::compute_transitions()
{
	logger::task LTask = clogger()->fine("Computing transition patterns");
	for (int I = 0; I < (int)Concepts.size(); I++) Concepts[I].occur_freq = 0;
	for (int I = 0; I < (int)Chunks.size(); I++) Concepts[Chunks[I].concept].occur_freq++;
	string Msg;
	for (int I = 0; I < (int)Concepts.size(); I++)
	{
		Concepts[I].end_prob = (double)Concepts[I].occur_freq / Concepts[I].length;
		Msg += to_string(Concepts[I].length) + "," + to_string((float)Concepts[I].verror) + " ";
	}
	clogger()->finer("Concept sizes and validation errors: " + Msg);
}

// Measurement update: rescale every concept's state probability by the
// likelihood of the observed label under that concept's classifier
// (1 - verror when the classifier labels the record correctly, verror
// otherwise), then renormalize the distribution.
// Improved: the record's true class is loop-invariant, so it is read
// once before the loop instead of on every iteration.
void high_order_stream_predictor::update_states_by_data(const record& Rec)
{
	discr_value TrueClass = Rec.back().discr;
	for (int I = 0; I < (int)Concepts.size(); I++)
	{
		discr_value Classified = Concepts[I].classifier->classify(Rec);
		if (TrueClass == Classified) StateProbs[I] *= 1 - Concepts[I].verror;
		else StateProbs[I] *= Concepts[I].verror;
	}
	normalize_probabilities(StateProbs);
}

// Time (transition) update of the state distribution: each concept ends
// with probability end_prob, and the process then jumps to another
// concept chosen with probability proportional to its occurrence count
// among the remaining concepts. With a single concept there is nowhere
// to jump, so the distribution is left unchanged.
void high_order_stream_predictor::update_states_by_time()
{
	if (Concepts.size() <= 1) return;
	array<double> OldProbs = StateProbs;
	StateProbs.assign(Concepts.size(), 0);
	int TotOccur = 0;
	for (int I = 0; I < (int)Concepts.size(); I++) TotOccur += Concepts[I].occur_freq;
	// Weights[I]: probability mass leaving concept I, pre-divided by the
	// occurrence total available to the receiving concepts, so the main
	// loop can use the closed form below instead of a double loop.
	// NOTE(review): divides by (TotOccur - occur_freq), which is zero if
	// one concept owns every chunk — the size<=1 guard above does not
	// cover that case; confirm it cannot happen.
	array<double> Weights(Concepts.size());
	double Sum = 0;
	for (int I = 0; I < (int)Concepts.size(); I++)
		Sum += Weights[I] = OldProbs[I] * Concepts[I].end_prob / (TotOccur - Concepts[I].occur_freq);
	for (int I = 0; I < (int)Concepts.size(); I++)
	{
		// Closed form of the reference double loop kept below: incoming
		// mass from every other concept plus the mass that stays put.
		/*for (int J = 0; J < (int)Concepts.size(); J++)
		if (J != I)
		StateProbs[I] += OldProbs[J] * Concepts[J].end_prob *
		((double)Concepts[I].occur_freq / (TotOccur - Concepts[J].occur_freq));
		else
		StateProbs[I] += OldProbs[I] * (1 - Concepts[I].end_prob);*/

		StateProbs[I] = (Sum - Weights[I]) * Concepts[I].occur_freq + OldProbs[I] * (1 - Concepts[I].end_prob);
	}
}

}

}
