package ao.ai.ml.algo.supervised.classification.linear.benchmark;

import ao.ai.ml.algo.supervised.classification.decison.RandomForestBinaryClassifier;
import ao.ai.ml.algo.supervised.classification.linear.discriminative.LogisticStochasticGradientAscent;
import ao.ai.ml.algo.supervised.classification.linear.discriminative.MeanBinaryClassifier;
import ao.ai.ml.algo.supervised.classification.model.learner.ext.BinaryLearner;
import ao.ai.ml.algo.supervised.model.example.ext.BinaryClassificationExample;
import ao.ai.ml.algo.supervised.model.example.impl.BinaryClassificationExampleImpl;
import ao.ai.ml.algo.supervised.model.validation.ext.BinaryValidation;
import ao.ai.ml.algo.supervised.model.validation.impl.RandomBinaryValidation;
import ao.ai.ml.model.feature_set.ext.num.NumericalFeatureList;
import ao.ai.ml.model.feature_set.impl.BinaryScalar;
import ao.ai.ml.model.feature_set.impl.FeatureScalar;
import ao.ai.ml.model.feature_type.FeatureType;
import ao.ai.ml.model.feature_type.ext.MutableFeatureSet;
import ao.ai.ml.model.feature_type.impl.FeatureTypeImpl;
import ao.ai.ml.model.feature_type.impl.FeatureTypeSetImpl;
import ao.ai.ml.model.fitness.FitnessMeasure;
import ao.util.math.rand.Rand;
import ao.util.time.Stopwatch;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.List;

/**
 * User: aostrovsky
 * Date: 12-Feb-2010
 * Time: 10:53:21 PM
 */
public class Benchmark
{
    //-------------------------------------------------------------------------
    /** Number of cross-validation rounds used when scoring a learner. */
    private static final int CROSS_VALIDATION_ROUNDS = 10;

    /** Fraction of the data held out per validation round. */
    private static final double HOLDOUT_FRACTION = 0.1;

    /** Default data-set size when none is given on the command line. */
    private static final int DEFAULT_SIZE = 100;

    /** Default probability of the positive class. */
    private static final double DEFAULT_POSITIVE_PROBABILITY = 0.5;

    private static final Logger LOG =
            LoggerFactory.getLogger(Benchmark.class);


    //-------------------------------------------------------------------------
    /**
     * Entry point: generates a random one-feature binary data set and
     * benchmarks several binary classifiers against it.
     *
     * @param args optional overrides: args[0] = data set size (default 100),
     *             args[1] = positive class probability (default 0.5)
     */
    public static void main(String[] args)
    {
        Rand.randomize();

        int size =
                (args.length > 0)
                ? Integer.parseInt(args[0])
                : DEFAULT_SIZE;
        double positiveProbability =
                (args.length > 1)
                ? Double.parseDouble(args[1])
                : DEFAULT_POSITIVE_PROBABILITY;

        List<BinaryClassificationExample> examples =
                randomDataSet(size, positiveProbability);

        LOG.info("benchmarking {} examples", examples.size());
        benchmark(examples, new RandomForestBinaryClassifier());
        benchmark(examples, new MeanBinaryClassifier());
        benchmark(examples, new LogisticStochasticGradientAscent());
    }


    //-------------------------------------------------------------------------
    /**
     * Cross-validates the given learner on the given data and logs its
     * fitness together with the elapsed wall-clock time.
     *
     * @param data    labelled examples to validate against
     * @param learner classifier under test
     */
    public static void benchmark(
            List<BinaryClassificationExample> data,
            BinaryLearner                     learner)
    {
        // Started before validation so the log line reflects total
        // learn-and-validate time.  (Renamed from "leanTimer" — typo.)
        Stopwatch learnTimer = new Stopwatch();

        BinaryValidation<NumericalFeatureList> validation =
                new RandomBinaryValidation<NumericalFeatureList>(
                        CROSS_VALIDATION_ROUNDS, HOLDOUT_FRACTION);

        FitnessMeasure fitness =
                validation.validate(learner, data);

        // Object[] form retained for compatibility with pre-1.7 SLF4J,
        // which lacks the three-argument varargs overload.
        LOG.info("{} has fitness {}, took {} ", new Object[]{
                learner, fitness, learnTimer});
    }


    //-------------------------------------------------------------------------
    /**
     * Generates a synthetic binary data set with a single numeric feature.
     * Positive examples are drawn from a Gaussian around a random mean in
     * [1, 10]; negatives around a random mean picked from the negative
     * range; both classes share one random standard deviation in [1, 5].
     *
     * @param size                number of examples to generate
     * @param positiveProbability probability that any one example is positive
     * @return freshly generated, independently sampled examples
     */
    private static List<BinaryClassificationExample> randomDataSet(
            int size, double positiveProbability)
    {
        LOG.info("generating data {}", size);

        FeatureType inputType  = new FeatureTypeImpl("x");
        FeatureType outputType = new FeatureTypeImpl("y");

        // NOTE(review): this set is populated but never read below —
        // kept in case FeatureTypeSetImpl registers types as a side
        // effect; confirm and delete if it does not.
        MutableFeatureSet inputFeatureTypes =
                new FeatureTypeSetImpl();
        inputFeatureTypes.add(inputType);

        // Presized: the final element count is known up front.
        List<BinaryClassificationExample> data =
                new ArrayList<BinaryClassificationExample>(size);

        double positiveMean = Rand.nextDouble( 1,  10);
        // NOTE(review): arguments here are (from, to) with from > to;
        // assumed Rand.nextDouble tolerates a reversed range — confirm.
        double negativeMean = Rand.nextDouble(-1, -10);
        double standardDev  = Rand.nextDouble( 1,  5);

        for (int i = 0; i < size; i++)
        {
            boolean isPositive =
                    Rand.nextBoolean( positiveProbability );
            // Each class is a Gaussian blob around its own mean.
            double  xMean      =
                    isPositive ? positiveMean : negativeMean;

            double x = Rand.nextGaussian() * standardDev + xMean;

            data.add(new BinaryClassificationExampleImpl(
                    new FeatureScalar(x, inputType),
                    new BinaryScalar (isPositive, outputType)
            ));
        }

        return data;
    }
}
