/**
 * Copyright 2012 Brigham Young University
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.byu.nlp.cluster.mom;

import java.util.logging.Logger;

import org.apache.commons.math3.random.RandomGenerator;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;

import edu.byu.nlp.cluster.BasicProbabilisticClustering;
import edu.byu.nlp.cluster.Dataset;
import edu.byu.nlp.cluster.ProbabilisticClusterer;
import edu.byu.nlp.cluster.ProbabilisticClustering;
import edu.byu.nlp.cluster.em.ParameterInitializer;
import edu.byu.nlp.data.SparseFeatureVector;
import edu.byu.nlp.math.optimize.ConvergenceChecker;
import edu.byu.nlp.math.optimize.ConvergenceCheckers;
import edu.byu.nlp.math.optimize.IterativeOptimizer;
import edu.byu.nlp.math.optimize.IterativeOptimizer.Optimizable;
import edu.byu.nlp.math.optimize.ValueAndObject;
import edu.byu.nlp.stats.RandomGenerators;
import edu.byu.nlp.util.Enumeration;
import edu.byu.nlp.util.Iterables2;

/**
 * @author rah67
 *
 */
public class UncollapsedClusterer implements ProbabilisticClusterer {

	private static final Logger logger = Logger.getLogger(UncollapsedClusterer.class.getName());

	/** Total number of sampling/ascent iterations performed by cluster(). */
	private static final int NUM_ITERATIONS = 50;
	/** Iteration at which beta optimization begins (burn-in before this point). */
	private static final int BETA_OPTIMIZATION_START = 15;
	/** Relative-change tolerance for the inner optimizer's convergence check. */
	private static final double RELATIVE_TOLERANCE = 1e-6;
	/** Iteration cap for the inner optimizer's convergence check. */
	private static final int MAX_INNER_ITERATIONS = 250;

	private final double alpha;
	private final double beta;
	private final ParameterInitializer<MoMParameters> initializer;
	private final boolean isHard;
	private final RandomGenerator rnd;

	// NOTE(review): currently unused by cluster(); see the FIXME there about returning
	// the "best" sample via this injected optimizer instead of the local loop.
	private final IterativeOptimizer optimizer;

	/**
	 * Constructs a clusterer based on the uncollapsed distribution. If isHard is false, then sampling is performed,
	 * otherwise, coordinate-wise gradient ascent is performed.
	 *
	 * @param alpha Dirichlet hyperparameter over cluster proportions; must be strictly greater than
	 *     1.0 when isHard (so the Dirichlet mode is well-defined), otherwise strictly greater than 0.0
	 * @param beta Dirichlet hyperparameter over per-cluster feature distributions; same bound as alpha
	 * @param optimizer outer optimizer (currently unused; see FIXME in cluster())
	 * @param initializer produces the initial model parameters used to seed cluster assignments
	 * @param isHard true for hard (mode) assignment, false for sampled assignment
	 * @param rnd source of randomness for initialization and sampling
	 * @throws IllegalArgumentException if alpha or beta violates its bound
	 * @throws NullPointerException if optimizer, initializer, or rnd is null
	 */
	public UncollapsedClusterer(double alpha, double beta, IterativeOptimizer optimizer,
			ParameterInitializer<MoMParameters> initializer, boolean isHard, RandomGenerator rnd) {
		if (isHard) {
			Preconditions.checkArgument(alpha > 1.0, "alpha (%s) must be strictly greater than 1.0", alpha);
			Preconditions.checkArgument(beta > 1.0, "beta (%s) must be strictly greater than 1.0", beta);
		} else {
			Preconditions.checkArgument(alpha > 0.0, "alpha (%s) must be strictly greater than 0.0", alpha);
			Preconditions.checkArgument(beta > 0.0, "beta (%s) must be strictly greater than 0.0", beta);
		}
		Preconditions.checkNotNull(optimizer);
		Preconditions.checkNotNull(initializer);
		Preconditions.checkNotNull(rnd);

		// Hard (mode-seeking) assignment uses the Dirichlet mode, which shifts the
		// hyperparameters down by 1; sampling uses them unchanged.
		this.alpha = isHard ? alpha - 1 : alpha;
		this.beta = isHard ? beta - 1 : beta;
		this.optimizer = optimizer;
		this.initializer = initializer;
		this.isHard = isHard;
		this.rnd = rnd;
	}

	/** One optimization step: reassigns clusters, then reports the resulting log joint. */
	private static final class UncollapsedOptimizable implements Optimizable<UncollapsedParameters> {

		private final Assigner assigner;

		public UncollapsedOptimizable(Assigner assigner) {
			this.assigner = assigner;
		}

		/** {@inheritDoc} */
		@Override
		public ValueAndObject<UncollapsedParameters> computeNext(UncollapsedParameters prevParams) {
			// Mutates prevParams in place; the returned object is the same instance.
			prevParams.nextAssignments(assigner);
			return new ValueAndObject<UncollapsedParameters>(prevParams.logJoint(), prevParams);
		}

	}

	/** {@inheritDoc} */
	@Override
	public ProbabilisticClustering cluster(Dataset data, int numClusters) {
		// FIXME : use the actual parameters (then I wouldn't have to call maximize first)
		int[] initialY = initializeAssignments(data, numClusters);
		ConvergenceChecker c = ConvergenceCheckers.or(
				ConvergenceCheckers.relativePercentChange(RELATIVE_TOLERANCE),
				ConvergenceCheckers.maxIterations(MAX_INNER_ITERATIONS));

		UncollapsedParameters sample = UncollapsedParameters.newInstance(data, initialY, numClusters, alpha, beta,
				new IterativeOptimizer(c));

		// Hard mode takes the mode assignment; soft mode samples an assignment.
		Assigner assigner = isHard ? new ModeAssigner() : new SampleAssigner(rnd);
		for (int i = 0; i < NUM_ITERATIONS; i++) {
			logger.info(String.format("Iteration %d: %f", i, sample.logJoint()));
			// Hold beta fixed during burn-in, then optimize it each iteration.
			if (i >= BETA_OPTIMIZATION_START) {
				sample.optimizeBeta();
			}
			sample.nextAssignments(assigner);
		}
		ValueAndObject<UncollapsedParameters> sampleAndLogJoint =
				new ValueAndObject<UncollapsedParameters>(sample.logJoint(), sample);
		/*
		// FIXME : would like to be able to return the "best" with the mutable sample
		UncollapsedOptimizable optimizable = new UncollapsedOptimizable(assigner);
		ValueAndObject<UncollapsedParameters> sampleAndLogJoint = optimizer.optimize(optimizable, ReturnType.LAST,
				isHard, sample);
				*/
		// FIXME : what's the predictive distribution?
		return new BasicProbabilisticClustering(data, numClusters, sampleAndLogJoint.getObject().getY(),
				sampleAndLogJoint.getObject().getModel(), sampleAndLogJoint.getValue());
	}

	/**
	 * Draws an initial cluster assignment for each unlabeled instance, sampling in proportion
	 * to the (unnormalized, log-space) joint under the initializer's model.
	 */
	private int[] initializeAssignments(Dataset data, int numClusters) {
		MoMParameters params = initializer.initialize(data, numClusters);
		int[] initialY = new int[data.unlabeledData().size()];
		for (Enumeration<SparseFeatureVector> e : Iterables2.enumerate(data.unlabeledData())) {
			// dist[k] is the unnormalized log joint of assigning this instance to cluster k.
			double[] dist = params.logJoint(e.getElement());
			initialY[e.getIndex()] = RandomGenerators.nextIntUnnormalizedLogProbs(rnd, dist);
		}
		return initialY;
	}

	/** Effective alpha after any hard-mode adjustment; exposed for tests. */
	@VisibleForTesting
	double getAlpha() {
		return alpha;
	}

	/** Effective beta after any hard-mode adjustment; exposed for tests. */
	@VisibleForTesting
	double getBeta() {
		return beta;
	}

}
