#include "stdafx.h"

namespace classifier_test
{

namespace high_order
{

// Default constructor: members rely on their own default construction;
// no explicit initialization is needed at this level.
concept_clustering::concept_clustering()
{
}

// Copy constructor.
// NOTE(review): nothing is copied here — in particular BaseClassifier is
// not duplicated, so a copy made before set_base_classifier() is called
// again has no base classifier. Confirm this is intentional.
concept_clustering::concept_clustering(const concept_clustering& Val)
{
}

// Serialization hook: intentionally empty — no state is written or read
// at this level. NOTE(review): BaseClassifier is not serialized; confirm.
void concept_clustering::serialize(serializer& Serializer)
{
}

// Command-line options hook: this base class registers no options.
void concept_clustering::initialize_options(command_line_options& Options) const
{
}

// Command-line arguments hook: this base class consumes no arguments.
void concept_clustering::apply_arguments(const command_line_arguments& Args)
{
}

// Stores a private clone of the supplied classifier prototype; the caller
// retains ownership of Val itself.
// NOTE(review): Val is dereferenced unchecked — passing 0 crashes.
void concept_clustering::set_base_classifier(classifier* Val)
{
	BaseClassifier = Val->clone();
}

double concept_clustering::compute_validation_error() const
{
	double Errors = 0;
	int Count = 0;
	for (int I = 0; I < get_concept_count(); I++)
		Errors += get_concept(I).verror * get_concept(I).vdataset->size(),
		Count += get_concept(I).vdataset->size();
	return Errors / Count;
}

// Default constructor: pairwise-similarity clustering over all chunk
// pairs (graph_mode), with a zero error-increase tolerance.
// Fix: Size was left uninitialized; it is read by compute()'s logging and
// arithmetic before set_chunk_count() necessarily runs, so zero it here.
high_order_concept_clustering::high_order_concept_clustering()
{
	Mode = graph_mode;
	ErrThreshold = 0;
	Size = 0;  // no chunks until set_chunk_count() is called
}

// Copy constructor: copies the clustering settings only.
// NOTE(review): Size, Chunks, Nodes, Concepts and the working structures
// are deliberately not copied — a copy starts with no data loaded.
// Confirm against how clone() results are used.
high_order_concept_clustering::high_order_concept_clustering(const high_order_concept_clustering& Val)
	: concept_clustering(Val)
{
	Mode = Val.Mode;
	ErrThreshold = Val.ErrThreshold;
}

// Polymorphic copy: duplicates this object's settings (not its data)
// via the copy constructor and hands ownership to a smart_ptr.
smart_ptr<concept_clustering> high_order_concept_clustering::clone() const
{
	high_order_concept_clustering* Copy = new high_order_concept_clustering(*this);
	return make_smart_ptr(Copy);
}

// Records the number of data chunks and sizes the per-chunk table to match.
// Must be called before set_chunk() / compute().
void high_order_concept_clustering::set_chunk_count(int Val)
{
	Size = Val;
	Chunks.resize(Size);
}

// Installs validation data (and optionally a pre-trained classifier) for
// chunk I. A null VClassifier makes compute() train one in init_nodes().
// NOTE(review): the VError argument is never stored or read — dead
// parameter; confirm whether per-chunk errors were meant to be kept.
void high_order_concept_clustering::set_chunk(int I, smart_ptr<validation_dataset> VDataset, smart_ptr<validation_classifier> VClassifier, double VError)
{
	Chunks[I].vdataset = VDataset;
	Chunks[I].vclassifier = VClassifier;
}

// Sets the relative error-increase tolerance used by do_merge() to decide
// whether a merged node is "useful" (errors <= best_errors * (1 + Val)).
void high_order_concept_clustering::set_error_threshold(double Val)
{
	ErrThreshold = Val;
}

// Selects the merge-candidate strategy: chain_mode proposes only adjacent
// chunk pairs, graph_mode proposes all pairs (see init_merges()).
void high_order_concept_clustering::set_mode(mode Val)
{
	Mode = Val;
}

// Runs the whole clustering pipeline: build one node per chunk, seed the
// candidate-merge heap, greedily merge nodes, then extract the concepts.
// All intermediate structures (nodes, graph, heap, per-chunk data) are
// released afterwards; only Concepts and each chunk's concept index
// (set by find_concepts()) remain.
void high_order_concept_clustering::compute()
{
	logger::task LTask = clogger()->fine("Concept clustering");
	clogger()->fine("# of chunks: " + to_string(Size));
	init_nodes();
	init_merges();
	merge_nodes();
	find_concepts();
	// Release working state now that Concepts holds the results.
	reset_vector(Nodes);
	Graph.clear();
	Heap.clear();
	for (int I = 0; I < (int)Chunks.size(); I++)
		Chunks[I].vdataset.reset(), Chunks[I].vclassifier.reset();
	clogger()->fine("# of concepts: " + to_string(Concepts.size()));
}

// Read-only access to chunk I (unchecked index).
const concept_clustering::chunk& high_order_concept_clustering::get_chunk(int I) const
{
	return Chunks[I];
}

// Number of concepts produced by compute(); 0 before it has run.
// (Implicit size_t -> int narrowing; counts are small in practice.)
int high_order_concept_clustering::get_concept_count() const
{
	return Concepts.size();
}

// Read-only access to concept I (unchecked index).
const concept_clustering::concept& high_order_concept_clustering::get_concept(int I) const
{
	return Concepts[I];
}

void high_order_concept_clustering::init_nodes()
{
	logger::task LTask = clogger()->fine("Initializing nodes");
	Nodes.reserve(Size * 2 - 1);
	Nodes.resize(Size);
	for (int I = 0; I < Size; I++)
	{
		Nodes[I].useful = true;
		Nodes[I].discarded = false;
		Nodes[I].left = Nodes[I].right = -1;
		Nodes[I].vdataset = Chunks[I].vdataset->clone();
		train_node(Nodes[I], Chunks[I].vclassifier);
	}
}

// Seeds the candidate-merge heap and the merge graph according to Mode.
void high_order_concept_clustering::init_merges()
{
	logger::task LTask = clogger()->fine("Initializing merges");
	Graph.set_size(Size * 2 - 1);
	Heap.set_key_comp(merge_worse(&Nodes[0]));
	if (Mode == chain_mode)
	{
		// Chain: only adjacent chunks are merge candidates.
		Heap.reserve(Size - 1);
		for (int A = 0; A + 1 < Size; A++)
			new_merge(A, A + 1);
	}
	else if (Mode == graph_mode)
	{
		// Graph: every unordered pair of chunks is a candidate.
		Heap.reserve(Size * (Size - 1) / 2);
		for (int A = 0; A < Size; A++)
			for (int B = A + 1; B < Size; B++)
				new_merge(A, B);
	}
}

// Greedy agglomeration: repeatedly take the best candidate merge from the
// heap, fuse the two nodes into a new one, and reconnect the new node to
// every former neighbour of either operand. Heap entries are erased via
// the payloads stored on graph edges rather than popped directly.
void high_order_concept_clustering::merge_nodes()
{
	// Scratch storage indexed by node id, sized for the full merge tree.
	vector<char> Marks(Size * 2 - 1, false);
	vector<int> List;
	List.reserve(Size * 2 - 1);
	logger::task LTask = clogger()->fine("Merging nodes");
	clogger()->finest(format_string("%8s%16s%10s%16s", "Graph:", "Childs' sizes", "Key", "Error decrease"));
	while (!Heap.empty())
	{
		LTask.progress("There are " + to_string(Size * 2 - 1 - Nodes.size()) + " nodes remaining");
		merge Merge = Heap.top()->key;
		int A = Merge.left;
		int B = Merge.right;
		const merge_graph::edge_list &AList = Graph[A], &BList = Graph[B];
		List.clear();
		// Drop every heap entry touching A and remember A's neighbours.
		// B is among them, so this also erases the popped A-B entry.
		for (int I = 0; I < (int)AList.size(); I++)
		{
			int C = AList[I].to;
			Marks[C] = true;
			Heap.erase(AList[I].value);
			List.push_back(C);
		}
		// Same for B; skip the shared A-B entry (erased above) and avoid
		// listing common neighbours twice via the marks.
		for (int I = 0; I < (int)BList.size(); I++)
		{
			int C = BList[I].to;
			if (C != A) Heap.erase(BList[I].value);
			if (!Marks[C]) List.push_back(C);
		}
		assert(Marks[B]);  // the popped merge must correspond to a graph edge
		int ASize = Nodes[A].vdataset->size();
		int BSize = Nodes[B].vdataset->size();
		do_merge(Merge);   // appends the fused node to Nodes
		node& Node = Nodes.back();
		int AB = Nodes.size() - 1;
		clogger()->finest(format_string("%8d%8d%8d%10.0lf%16.1lf", AB, ASize, BSize, Merge.key, Nodes[A].best_errors + Nodes[B].best_errors - Node.errors));
		// Rewire: detach A and B, then propose a merge between the new
		// node AB and each surviving neighbour (List holds A and B too,
		// hence the -2 when reserving).
		Graph.remove_vertex(A);
		Graph.remove_vertex(B);
		Graph[AB].reserve(List.size() - 2);
		for (int I = 0; I < (int)List.size(); I++)
		{
			int C = List[I];
			Marks[C] = false;  // reset scratch flag for the next iteration
			if (C == A || C == B) continue;
			new_merge(AB, C);
		}
	}
	LTask.progress("");
	assert((int)Nodes.size() < Size * 2);
	// Validation data is no longer needed once merging has finished.
	for (int I = 0; I < (int)Nodes.size(); I++) Nodes[I].vdataset.reset();
}

// Extracts the final concepts from the merge tree. Walking nodes from the
// root downwards (highest index first), every useful, non-discarded node
// becomes a concept, and its whole subtree is labelled with that
// concept's index through Belongs.
void high_order_concept_clustering::find_concepts()
{
	vector<int> Belongs(Nodes.size(), -1);
	vector<int> ConceptNodes;
	for (int I = Nodes.size() - 1; I >= 0; I--)
	{
		if (Nodes[I].useful && !Nodes[I].discarded)
		{
			Belongs[I] = ConceptNodes.size();
			ConceptNodes.push_back(I);
		}
		// Propagate the owning concept to the children; child indices are
		// always smaller, so they are visited later in this loop.
		if (Nodes[I].left >= 0 && Belongs[I] >= 0)
			Belongs[Nodes[I].left] = Belongs[Nodes[I].right] = Belongs[I];
	}
	Concepts.resize(ConceptNodes.size());
	// The first Size nodes are the leaf chunks: record each chunk's concept
	// and pool the concept's validation data from its chunks.
	for (int I = 0; I < Size; I++)
	{
		int J = Belongs[I];
		Chunks[I].concept = J;
		if (Concepts[J].vdataset.get() == 0)
			Concepts[J].vdataset = Chunks[I].vdataset;  // first chunk: share it
		else
			Concepts[J].vdataset->add(&*Chunks[I].vdataset);
	}
	// Attach each concept's classifier and its validation error rate
	// (error count of the concept node over the pooled dataset size).
	for (int I = 0; I < (int)Concepts.size(); I++)
	{
		int J = ConceptNodes[I];
		Concepts[I].vclassifier = Nodes[J].vclassifier;
		Concepts[I].verror = (double)Nodes[J].errors / Concepts[I].vdataset->size();
	}
}

// Attaches a classifier to Node — the supplied one when present, otherwise
// a fresh copy of the base classifier trained on the node's own data —
// and evaluates the node's error counts.
void high_order_concept_clustering::train_node(node& Node, smart_ptr<validation_classifier> VClassifier)
{
	if (VClassifier.get() != 0)
		Node.vclassifier = VClassifier;
	else
		Node.vclassifier = Node.vdataset->train(&*BaseClassifier);
	// errors: error rate on the node's own data scaled to an error count.
	Node.errors = Node.vclassifier->test_error(&*Node.vdataset) * Node.vdataset->size();
	Node.best_errors = Node.errors;
	// For an internal node, the best achievable error is the smaller of
	// keeping the merged classifier or splitting back into the children.
	if (Node.left >= 0)
	{
		double ChildErrors = Nodes[Node.left].best_errors + Nodes[Node.right].best_errors;
		if (ChildErrors < Node.best_errors)
			Node.best_errors = ChildErrors;
	}
}

// Recursively marks a subtree as discarded and releases its classifiers.
// An already-discarded node is skipped — its subtree was handled before.
void high_order_concept_clustering::discard_node(node& Node)
{
	if (Node.discarded) return;
	Node.discarded = true;
	Node.vclassifier.reset();
	if (Node.left < 0) return;  // leaf: nothing below
	discard_node(Nodes[Node.left]);
	discard_node(Nodes[Node.right]);
}

// Evaluates merging nodes A and B. Promising candidates are inserted into
// the heap and recorded as a graph edge; candidates whose error increase
// exceeds 3*sqrt(combined dataset size) are rejected outright.
void high_order_concept_clustering::new_merge(int A, int B)
{
	merge Merge;
	Merge.left = A;
	Merge.right = B;
	Merge.key = 0;
	// Pool the validation data of both nodes.
	smart_ptr<validation_dataset> VDataset = Nodes[A].vdataset->clone();
	VDataset->add(&*Nodes[B].vdataset);
	if (Mode == chain_mode)
	{
		// Chain mode: actually train on the pooled data and rank by the
		// error increase over keeping the two nodes separate.
		Merge.vclassifier = VDataset->train(&*BaseClassifier);
		double Errors = Merge.vclassifier->test_error(&*VDataset) * VDataset->size();
		double Delta = Errors - (Nodes[A].best_errors + Nodes[B].best_errors);
		if (Delta > sqrt((double)VDataset->size()) * 3) return;
		Merge.key = Delta;
	}
	else if (Mode == graph_mode)
	{
		// Graph mode: no training — rank by the disagreement between the
		// two existing classifiers on the pooled data. NOTE(review):
		// Dis*size/2 appears to estimate the extra errors a merge could
		// add; confirm against classification_similarity's definition.
		vector<discr_value> AResults, BResults;
		Nodes[A].vclassifier->test_results(&*VDataset, AResults);
		Nodes[B].vclassifier->test_results(&*VDataset, BResults);
		double Dis = 1 - classification_similarity(AResults, BResults);
		double Delta = Dis * VDataset->size() / 2 - (Nodes[A].best_errors + Nodes[B].best_errors);
		if (Delta > sqrt((double)VDataset->size()) * 3) return;
		Merge.key = Dis;
	}
	Graph.add_edge(A, B, Heap.insert(Merge));
}

// Materializes an accepted merge: appends a new node whose children are
// Merge.left/right, pools their validation data, attaches/trains its
// classifier, and decides whether the merged node is worth keeping.
void high_order_concept_clustering::do_merge(const merge& Merge)
{
	int A = Merge.left;
	int B = Merge.right;
	// Safe for outstanding references/pointers into Nodes: init_nodes()
	// reserved full capacity, so this push_back never reallocates.
	Nodes.push_back(node());
	node& Node = Nodes.back();
	Node.left = A;
	Node.right = B;

	// Reuse the larger child's dataset object in place and append the
	// smaller child's data into it (cheaper than cloning); the smaller
	// child's data is then released. A/B are swapped only locally — the
	// recorded left/right children above are unaffected.
	if (Nodes[A].vdataset->size() < Nodes[B].vdataset->size()) swap(A, B);
	Node.vdataset = Nodes[A].vdataset;
	Node.vdataset->add(&*Nodes[B].vdataset);
	Nodes[B].vdataset.reset();
	train_node(Node, Merge.vclassifier);  // vclassifier set only in chain_mode

	// The merged node is useful when its own error stays within the
	// relative ErrThreshold of the best split error; if so, the children's
	// subtrees become redundant and are discarded.
	Node.useful = Node.errors <= Node.best_errors * (1 + ErrThreshold);
	Node.discarded =false;
	if (Node.useful)
	{
		discard_node(Nodes[A]);
		discard_node(Nodes[B]);
	}
}

}

}
