package ao.ai.ml.algo.supervised.classification.linear.discriminative;

import ao.ai.ml.algo.supervised.classification.model.learner.ext.BinaryLearner;
import ao.ai.ml.algo.supervised.model.example.Example;
import ao.ai.ml.algo.supervised.model.hypothesis.ext.BinaryClassificationHypothesis;
import ao.ai.ml.algo.supervised.model.hypothesis.impl.LogisticHypothesis;
import ao.ai.ml.model.feature_set.ext.cat.bin.SingleBinaryFeature;
import ao.ai.ml.model.feature_set.ext.num.NumericalFeatureList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.List;

/**
 * User: aostrovsky
 * Date: 16-Feb-2010
 * Time: 9:29:18 PM
 */
/**
 * Learns a binary logistic-regression classifier by stochastic gradient
 * ascent (per-example weight updates), retrying with a halved learning
 * rate whenever the search diverges at the current rate.
 */
public class LogisticStochasticGradientAscent
        implements BinaryLearner
{
    //--------------------------------------------------------------------
    private static final Logger LOG =
            LoggerFactory.getLogger(
            		LogisticStochasticGradientAscent.class);


    //--------------------------------------------------------------------
    // Abort the current learning rate after this many worsening
    //  iterations in a row (parameter delta grew vs. previous pass) ...
    private static final int    MAX_CONSECUTIVE_DESCENTS = 10;
    // ... or after this many worsening iterations in total.
    private static final int    MAX_TOTAL_DESCENTS       = 100;
    // Hard cap on gradient-ascent passes per learning rate.
	private static final int    MAXIMUM_ASCENTS          = 10 * 1000;
    // Convergence is declared once the RMS parameter delta drops below
    //  10^-CONVERGENCE_DIGITS (== CONVERGENCE_THRESHOLD).
	private static final int    CONVERGENCE_DIGITS       = 6;
	private static final int    CONVERGENCE_SIGNIFICANCE = (int)
			Math.pow(10, CONVERGENCE_DIGITS);
    // Scale used by round(); 1000.0 divisor avoids silent integer
    //  division (the old "/ 1000" was only exact by coincidence).
	private static final double CONVERGENCE_PRECISION    =
			CONVERGENCE_SIGNIFICANCE / 1000.0;
	private static final double CONVERGENCE_THRESHOLD    =
			1.0 / CONVERGENCE_SIGNIFICANCE;

    // Give up entirely once the repeatedly-halved learning rate
    //  falls below this floor.
	private static final double MIN_LEARNING_RATE	     =
			0.0000000000000001;

    // Number of ascend() passes performed by the latest learn() call
    //  (across all attempted learning rates).  Note: makes instances
    //  of this class non-thread-safe.
    private int totalTrials;


    //-------------------------------------------------------------------------
    /**
     * Learns a hypothesis for the given training set, starting at a
     * learning rate of 1.0 and halving it on every failed attempt.
     *
     * @param data training examples (numerical inputs, binary outputs)
     * @return a converged hypothesis, or null if no learning rate down
     *         to MIN_LEARNING_RATE produced convergence
     */
    @Override
    public BinaryClassificationHypothesis learn(
            List<? extends Example<
                    ? extends NumericalFeatureList,
                    ? extends SingleBinaryFeature>>
                data)
    {
        totalTrials = 0;

        double learningRate = 1.0;
		LogisticHypothesis hypothesis;
		while ((hypothesis = learn(data, learningRate)) == null) {
			learningRate /= 2;

			if (reachedDeadEnd(learningRate)) {
                LOG.debug("reached dead end in {} trials", totalTrials);
                return null;
            }
		}

        LOG.trace("found answer in {} trials", totalTrials);
		return hypothesis;
    }

    // True once the learning rate can no longer meaningfully shrink.
    private boolean reachedDeadEnd(double learningRate) {
		return Double.isInfinite( learningRate ) ||
				learningRate < MIN_LEARNING_RATE;
	}


    //--------------------------------------------------------------------
    /**
     * Attempts to learn at one fixed learning rate.
     *
     * @param data training examples; an empty list yields null
     * @param learningRate step size applied to each per-example update
     * @return a converged hypothesis, or null when the search diverges
     *         (too many iterations whose parameter delta grew), the
     *         delta becomes NaN/infinite, or data is empty
     */
    public LogisticHypothesis learn(
			List<? extends Example<
                     ? extends NumericalFeatureList,
                     ? extends SingleBinaryFeature>>
                   data,
			double learningRate)
	{
        if (data.isEmpty()) {
            return null;
        }
		LOG.trace("learning {} data points at {}",
					data.size(), learningRate);

        Example<? extends NumericalFeatureList,
                ? extends SingleBinaryFeature>
            arbitraryExample = data.get(0);

        // one weight per input feature, plus a bias term at index 0
		double[] params = new double[
                arbitraryExample.input().size() + 1];

		int iteration           = 0;
        int totalDescents       = 0;
        int consecutiveDescents = 0;
		// Seed with +infinity so the first pass can never register as a
		//  worsening step (seeding 0, as before, flagged iteration zero
		//  as divergent since its delta is almost always positive).
		double prevDelta = Double.POSITIVE_INFINITY;
		for (; iteration < MAXIMUM_ASCENTS; iteration++) {
            double delta = ascend(params, data, learningRate);
            if (delta > prevDelta) {
                // Parameters moved further than last pass: diverging.
                // (The old `a++ > X | b++ > Y` mixed bitwise OR with
                //  embedded post-increments and never reset the
                //  consecutive counter.)
                consecutiveDescents++;
                totalDescents++;
                if (consecutiveDescents > MAX_CONSECUTIVE_DESCENTS ||
                        totalDescents   > MAX_TOTAL_DESCENTS) {
                    return null;
                }
            } else {
                consecutiveDescents = 0; // progress resumed
            }
			prevDelta = delta;
			if (cantLearn(delta)) return null;

			LOG.trace("Ascent {} delta {} :: {}", new Object[]{
					  iteration, delta, new LogisticHypothesis(
                            params, arbitraryExample )});

			if (delta < CONVERGENCE_THRESHOLD) break;
		}

        LogisticHypothesis hypothesis =
                new LogisticHypothesis( params, arbitraryExample );
		LOG.trace("completed in {} with {} -> {}",
                  new Object[]{iteration, prevDelta, hypothesis});
		return hypothesis;
	}

    // NaN or infinite deltas mean the weights have blown up at this
    //  learning rate.
	private boolean cantLearn(double delta) {
		return Double.isNaN(delta) ||
				Double.isInfinite(delta);
	}

    /**
     * Rounds each parameter to the precision implied by
     * CONVERGENCE_PRECISION.
     * NOTE(review): currently unused by any code in this file — kept
     * for now; candidate for removal if no external caller exists.
     */
	private void round(double[] parameters)
	{
		// not 100% sure that this preserves maximum precision
		for (int i = 0; i < parameters.length; i++) {
			parameters[ i ] = (double) Math.round(
					parameters[ i ] * CONVERGENCE_PRECISION)
						  / CONVERGENCE_PRECISION;
		}
	}



	//--------------------------------------------------------------------
	/**
	 * Performs one full stochastic pass over the training set, updating
	 * the weight vector in place after every example.
	 *
	 * @param params from previous iteration; mutated in place
	 * @param data training set
	 * @param learningRate algorithm parameter
     * @return quadratic mean of parameter deltas for this pass
	 */
	private double ascend(
			double[] params,
			List<? extends Example<? extends NumericalFeatureList,
                                   ? extends SingleBinaryFeature>>
                     data,
			double	 learningRate)
	{
        double[] preParams = params.clone();
        // Presumably LogisticHypothesis keeps a live reference to
        //  params, so each update below is immediately reflected in the
        //  next prediction (true stochastic updates) — TODO confirm.
        LogisticHypothesis hypothesis =
                    new LogisticHypothesis( params, null );

        for (Example<? extends NumericalFeatureList,
                     ? extends SingleBinaryFeature> e : data)
        {
            double actualProb    =
                    e.output().binaryCategory()
                    ? 1.0 : 0.0;
            double predictedProb =
                    hypothesis.probabilityOfPositive( e.input() );
			double residue       =
                    actualProb - predictedProb;

            double pacedResidue = learningRate * residue;
            params[ 0 ] += pacedResidue; // bias term
            for (int f = 1; f < params.length; f++) {
				params[ f ] += pacedResidue *
                               e.input().doubleValue(f - 1);
			}
        }

        totalTrials++;
		return rootMeanSquares(preParams, params);
	}

    // Root-mean-square of element-wise differences between two
    //  equal-length vectors.
    private double rootMeanSquares(
			double[] before, double[] after) {
		assert before.length == after.length;

		double deltaSumOfSquares = 0;
		for (int i = 0; i < before.length; i++) {
			double delta = after[i] - before[i];
			deltaSumOfSquares += delta * delta;
		}

		return Math.sqrt(deltaSumOfSquares / before.length);
	}


	//-------------------------------------------------------------------------
	// Fixed: previously said "Descent" although the algorithm (and the
	//  class name) is gradient Ascent.
	@Override public String toString() {
		return "Logistic Stochastic Gradient Ascent";
	}
}
