package com.yahoo.labs.snow.solver;

import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap;
import it.unimi.dsi.fastutil.ints.IntOpenHashSet;
import it.unimi.dsi.logging.ProgressLogger;

import java.util.Vector;

import com.yahoo.labs.snow.ProblemInstance;
import com.yahoo.labs.snow.Snowflake;

/**
 * A solver that uses restricted Hierarchical Agglomerative Clustering (HAC).
 * 
 * It starts with each element as a single snowflake, then merges those that are more compatible,
 * honoring the budget and coverage constraints.
 * 
 * @author chato
 * 
 */
public class RestrictedHACSolver extends MultiplicativeEffortProduceAndChooseSolver {

	public RestrictedHACSolver(ProblemInstance problem) {
		super(problem);
	}

	/**
	 * Produces snowflakes by restricted HAC: starts with one singleton cluster per
	 * affordable node, then greedily merges the most compatible pair of clusters
	 * until {@code numToProduce} clusters remain or no valid merge exists.
	 * 
	 * @param numToProduce the target number of snowflakes
	 * @return the resulting snowflakes, one per surviving cluster
	 * @throws IllegalArgumentException if the problem has fewer nodes than {@code numToProduce}
	 */
	@Override
	public Vector<Snowflake> produceManySnowflakes(int numToProduce) {
		if (problem.numNodes() < numToProduce) {
			throw new IllegalArgumentException("Too few nodes");
		}

		// Put each item in its own cluster, skipping nodes whose individual cost
		// already exceeds the budget (they can never be part of a valid snowflake)
		Int2ObjectOpenHashMap<IntOpenHashSet> clustering = new Int2ObjectOpenHashMap<IntOpenHashSet>();
		for (int node : problem.getIds()) {
			if (problem.getCost(node) <= problem.getBudget()) {
				clustering.put(node, new IntOpenHashSet(new int[] { node }));
			}
		}

		// Merge iteratively until we reach the target size or no valid merge remains
		boolean merged = true;
		ProgressLogger pl = new ProgressLogger(logger, ProgressLogger.ONE_SECOND, "merges");
		// Clamp to zero: filtering over-budget singletons above may already have
		// left fewer clusters than numToProduce
		pl.expectedUpdates = Math.max(0, clustering.size() - numToProduce);
		pl.start( "Merging, started from " + clustering.size() + " clusters, reducing to " + numToProduce );
		while (merged && clustering.size() > numToProduce) {
			merged = tryMerge(clustering);
			if (merged) {
				// Count only successful merges, otherwise the final failed attempt
				// would be reported as progress
				pl.update();
			}
		}
		pl.stop();
		if (!merged) {
			logger.info("Could not merge more, stopped at " + clustering.size() );
		}

		// Wrap each surviving cluster into a snowflake
		Vector<Snowflake> solution = new Vector<Snowflake>();
		for (IntOpenHashSet set : clustering.values()) {
			solution.add(new Snowflake(set, problem));
		}

		return solution;
	}

	/**
	 * Tries to perform a merge of clusters; this may not succeed if no merge yields a valid snowflake.
	 * 
	 * Scans every pair of clusters, keeps the feasible pair (within budget and
	 * coverage constraints) with the highest pairwise compatibility, and merges it.
	 * Note: this is O(k^2) in the number of clusters per call.
	 * 
	 * @param clustering the clustering, which is modified if a merge succeeds
	 * @return true iff a merge was possible and done
	 */
	private boolean tryMerge(Int2ObjectOpenHashMap<IntOpenHashSet> clustering) {

		// Best feasible pair found so far
		int bestC1 = -1;
		int bestC2 = -1;
		double maxCompatibility = -1.0;

		// Note: this scans clusters left to right; a randomized ordering could be used
		int[] keys = clustering.keySet().toArray(new int[] {});
		for (int i=0; i<keys.length; i++ ) {
			int c1 = keys[i];
			IntOpenHashSet cluster1 = clustering.get(c1);
			for( int j=i+1; j<keys.length; j++ ) {
				int c2 = keys[j];
				IntOpenHashSet cluster2 = clustering.get(c2);
				// Only pairs whose union still satisfies budget and coverage qualify
				if (checkBudgetAndCoverageConstraint(cluster1, cluster2)) {
					// If they can be merged, measure their compatibility
					double compatibility = problem.maxPairwiseCompatibility(cluster1, cluster2);
					if (compatibility > maxCompatibility) {
						bestC1 = c1;
						bestC2 = c2;
						maxCompatibility = compatibility;
					}
				}
			}
		}

		if (bestC1 >= 0 && bestC2 >= 0) {
			// Absorb C2 into C1 in one bulk operation, then drop C2
			clustering.get(bestC1).addAll(clustering.get(bestC2));
			clustering.remove(bestC2);
			return true;
		}

		// No feasible pair found: report merging failed
		return false;
	}
}
