package rmmk.algorithms.knn;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map.Entry;
import java.util.NavigableMap;
import java.util.TreeMap;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import rmmk.algorithms.features.FeatureManager;
import rmmk.algorithms.knn.math.Distance;
import rmmk.algorithms.knn.math.MinMax;
import rmmk.algorithms.knn.math.SimilarityCalculator;
import rmmk.algorithms.preprocessing.config.DistanceMetric;
import rmmk.algorithms.preprocessing.config.KnnType;
import rmmk.algorithms.similarityMeasures.SimilarityMeasureManager;
import rmmk.framework.datasources.Document;

/**
 * K-nearest-neighbours algorithm. Use its facade for computations; the facade
 * is in the class {@code KnnOwnImpl}.
 * 
 * @author marcin
 * 
 */
public class KnnAlgorithm {

	private static Logger logger = LoggerFactory
			.getLogger(KnnAlgorithm.class);
	
	/*
	 * public enum NormalizationType { Standarization, Normalization }
	 * 
	 * NormalizationType normalizationType;
	 */
	HashMap<Document, Double[]> normalizedVector = new HashMap<>();
	HashMap<Document, Double[]> normalizedTrainSet = new HashMap<>();
	Double[][] minMax;
	KnnType knnType;

	public KnnAlgorithm(List<Document> trainSet, FeatureManager fm,
			SimilarityMeasureManager smm, KnnType type) {
		this.knnType = type;
		if (type.equals(KnnType.FeatureVector))
			normalizeTrainSet(trainSet, fm);
		else
			teach(trainSet, smm);

	}

	private void teach(List<Document> trainSet, SimilarityMeasureManager smm) {

		if (knnType.equals(KnnType.SimilarityMeasure))
			for (Document d : trainSet) {
				normalizedVector.put(d, new Double[smm.getVectorLenght()]);
			}

	}

	private Double[] normalizeOneRow(Document document, FeatureManager fm) {
		if (this.normalizedVector.containsKey(document))
			return this.normalizedVector.get(document);

		int vectorLenght = fm.getVectorLenght();

		Double[] normalizedValues = new Double[vectorLenght];

		Double[] vector = fm.calculateFeatureFor(document);

		for (int y = 0; y < vectorLenght; ++y) {
			normalizedValues[y] = (vector[y] - minMax[y][0])
					/ (minMax[y][1] - minMax[y][0]);
		}

		normalizedVector.put(document, normalizedValues);

		return normalizedValues;
	}

	public Document calculate(Document input, FeatureManager fm,
			SimilarityMeasureManager smm, int k, DistanceMetric dm, KnnType type) {

		TreeMap<Integer, Document> sorted;

		switch (type) {
		case FeatureVector:

			Double[] normalizedInput = normalizeOneRow(input, fm);

			TreeMap<Double, Document> calculatedDistances = new TreeMap<Double, Document>();

			// for (Document row : this.normalizedVector.keySet()) {
			for (Entry<Document, Double[]> entry : this.normalizedTrainSet
					.entrySet()) {
				// Double[] rowVector = this.normalizeOneRow(row, fm);

				Double distance = Distance.calculateDistance(normalizedInput,
						entry.getValue(), dm);

				calculatedDistances.put(distance, entry.getKey());
			}

			sorted = classify(k, calculatedDistances);

			break;

		case SimilarityMeasure:

			TreeMap<Double, Document> calculatedSimilarityMeasures = new TreeMap<Double, Document>();

			for (Document row : this.normalizedVector.keySet()) {

				Double[] rowVector = smm.calculateSimilarityMeasureFor(input,
						row);
				Double resultSimilarity = SimilarityCalculator
						.calculateSimilarity(rowVector);

				calculatedSimilarityMeasures.put(resultSimilarity, row);
			}

			sorted = classify(k, calculatedSimilarityMeasures);

			break;

		default:
			sorted = null;
			break;
		}

		return getNominal(sorted.lastEntry().getValue(), input);// [sorted.lastEntry().getValue().length
																// - 1];
	}

	private TreeMap<Integer, Document> classify(int k,
			TreeMap<Double, Document> calculatedDistances) {
		HashMap<Document, Integer> score = new HashMap<>();

		if (this.knnType.equals(KnnType.FeatureVector)) {
			int x = 0;
			for (Entry<Double, Document> row : calculatedDistances.entrySet()) {
				if (x >= k)
					break;

				if (score.containsKey(row.getValue())) {
					int temp = score.get(row.getValue());

					score.remove(row.getValue());

					score.put(row.getValue(), ++temp);
				} else
					score.put(row.getValue(), 0);

				++x;
			}
		}else{
			int x = 0;
			for (Entry<Double, Document> row : calculatedDistances.descendingMap().entrySet()) {
				if (x >= k)
					break;

				if (score.containsKey(row.getValue())) {
					int temp = score.get(row.getValue());

					score.remove(row.getValue());

					score.put(row.getValue(), ++temp);
				} else
					score.put(row.getValue(), 0);

				++x;
			}
		}

		TreeMap<Integer, Document> sorted = new TreeMap<>();

		for (Entry<Document, Integer> entry : score.entrySet())
			sorted.put(entry.getValue(), entry.getKey());
		return sorted;
	}

	private Document getNominal(Document classifiedAs, Document input) {
		input.setClassifiedCategories(classifiedAs);

		return input;
	}

	private void normalizeTrainSet(List<Document> trainSet, FeatureManager fm) {
		if (trainSet.size() == 0)
			return;
		
		logger.info("Knn normalization started");

		int vectorLenght = fm.getVectorLenght();

		MinMax mm = new MinMax();
		logger.info("Knn minmax started");
		this.minMax = mm.getMinMaxVector(trainSet, fm);
		logger.info("Knn minmax ended");
		
		for (Document row : trainSet) {
			Double[] normalizedValues = new Double[vectorLenght];

			Double[] vector = fm.calculateFeatureFor(row);

			for (int y = 0; y < vectorLenght; ++y) {
				normalizedValues[y] = (vector[y] - minMax[y][0])
						/ (minMax[y][1] - minMax[y][0]);
			}

			normalizedVector.put(row, normalizedValues);
			normalizedTrainSet.put(row, normalizedValues);
		}
		
		logger.info("Knn normalization ended");
	}

}
