/*
 * This file is part of ELKI:
 * Environment for Developing KDD-Applications Supported by Index-Structures
 *
 * Copyright (C) 2022
 * ELKI Development Team
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package elki.algorithm.statistics;

import java.util.Arrays;
import java.util.Random;

import elki.Algorithm;
import elki.data.DoubleVector;
import elki.data.NumberVector;
import elki.data.type.TypeInformation;
import elki.data.type.TypeUtil;
import elki.database.ids.DBIDIter;
import elki.database.ids.DBIDRef;
import elki.database.ids.DBIDUtil;
import elki.database.ids.ModifiableDBIDs;
import elki.database.query.QueryBuilder;
import elki.database.query.knn.KNNSearcher;
import elki.database.relation.Relation;
import elki.database.relation.RelationUtil;
import elki.distance.NumberVectorDistance;
import elki.distance.minkowski.EuclideanDistance;
import elki.logging.Logging;
import elki.logging.Logging.Level;
import elki.logging.statistics.DoubleStatistic;
import elki.logging.statistics.LongStatistic;
import elki.math.MathUtil;
import elki.math.MeanVariance;
import elki.math.statistics.distribution.BetaDistribution;
import elki.utilities.documentation.Reference;
import elki.utilities.exceptions.AbortException;
import elki.utilities.optionhandling.OptionID;
import elki.utilities.optionhandling.Parameterizer;
import elki.utilities.optionhandling.WrongParameterValueException;
import elki.utilities.optionhandling.constraints.CommonConstraints;
import elki.utilities.optionhandling.parameterization.Parameterization;
import elki.utilities.optionhandling.parameters.DoubleListParameter;
import elki.utilities.optionhandling.parameters.IntParameter;
import elki.utilities.optionhandling.parameters.ObjectParameter;
import elki.utilities.optionhandling.parameters.RandomParameter;
import elki.utilities.random.RandomFactory;

/**
 * The Hopkins Statistic of Clustering Tendency measures the probability that a
 * data set is generated by a uniform data distribution.
 * <p>
 * The statistic compares the ratio of the 1NN distance for objects from the
 * data set compared to the 1NN distances of uniform distributed objects.
 * <p>
 * Reference:
 * <p>
 * B. Hopkins, J. G. Skellam<br>
 * A new method for determining the type of distribution of plant
 * individuals<br>
 * Annals of Botany, 18(2), 213-227.
 *
 * @author Lisa Reichert
 * @author Erich Schubert
 * @since 0.7.0
 */
// TODO: allow using more than one k
@Reference(authors = "B. Hopkins, J. G. Skellam", //
    title = "A new method for determining the type of distribution of plant individuals", //
    booktitle = "Annals of Botany, 18(2), 213-227", //
    url = "https://doi.org/10.1093/oxfordjournals.aob.a083391", //
    bibkey = "doi:10.1093/oxfordjournals.aob.a083391")
public class HopkinsStatisticClusteringTendency implements Algorithm {
  /**
   * The logger for this class.
   */
  private static final Logging LOG = Logging.getLogger(HopkinsStatisticClusteringTendency.class);

  /**
   * Number of sample points drawn per repetition (the "m" of the Hopkins
   * statistic).
   */
  protected int sampleSize;

  /**
   * Number of repetitions; results are averaged for a more stable estimate.
   */
  protected int rep;

  /**
   * Nearest neighbor to use (the k-th nearest, not necessarily the first).
   */
  protected int k;

  /**
   * Random generator seeding.
   */
  protected RandomFactory random;

  /**
   * Stores the maximum in each dimension; empty or {@code null} means
   * "derive from the data" (see {@link #initializeDataExtends}).
   */
  private double[] maxima = new double[0];

  /**
   * Stores the minimum in each dimension; empty or {@code null} means
   * "derive from the data" (see {@link #initializeDataExtends}).
   */
  private double[] minima = new double[0];

  /**
   * Distance function used.
   */
  protected NumberVectorDistance<? super NumberVector> distance;

  /**
   * Constructor.
   *
   * @param distance Distance function
   * @param samplesize Sample size
   * @param random Random generator
   * @param rep Number of repetitions
   * @param k Nearest neighbors to use
   * @param minima Data space minima, may be {@code null} (get from data).
   * @param maxima Data space maxima, may be {@code null} (get from data).
   */
  public HopkinsStatisticClusteringTendency(NumberVectorDistance<? super NumberVector> distance, int samplesize, RandomFactory random, int rep, int k, double[] minima, double[] maxima) {
    super();
    this.distance = distance;
    this.sampleSize = samplesize;
    this.random = random;
    this.rep = rep;
    this.k = k;
    this.minima = minima;
    this.maxima = maxima;
  }

  /**
   * Input type: a fixed-dimensionality vector field, as required to generate
   * uniform random vectors of matching dimensionality.
   */
  @Override
  public TypeInformation[] getInputTypeRestriction() {
    return TypeUtil.array(TypeUtil.NUMBER_VECTOR_FIELD);
  }

  /**
   * Compute the Hopkins statistic for a vector relation.
   * <p>
   * Detailed results (mean and standard deviation of h, u, and w over the
   * repetitions) are emitted via the statistics logger; the return value is a
   * p-value obtained by comparing the averaged statistic to the Beta(m, m)
   * distribution it follows under the uniform null hypothesis.
   *
   * @param relation Relation
   * @return p-value of the Hopkins statistic
   */
  public Double run(Relation<NumberVector> relation) {
    final int dim = RelationUtil.dimensionality(relation);
    final QueryBuilder<NumberVector> qb = new QueryBuilder<>(relation, distance);
    // k+1 neighbors, because a by-DBID query includes the query object itself.
    KNNSearcher<NumberVector> knnQuery = qb.kNNByObject(k + 1);
    KNNSearcher<DBIDRef> intQuery = qb.kNNByDBID(k + 1);

    // Bounding box used for generating uniform random query points:
    final double[] min = new double[dim], extend = new double[dim];
    initializeDataExtends(relation, dim, min, extend);

    // The interesting output is statistics; without them this is pointless.
    if(!LOG.isStatistics()) {
      LOG.warning("This algorithm must be used with at least logging level " + Level.STATISTICS);
    }

    MeanVariance hmean = new MeanVariance(), umean = new MeanVariance(),
        wmean = new MeanVariance();
    // compute the hopkins value several times and use the average value for a
    // more stable result
    for(int j = 0; j < this.rep; j++) {
      // Compute NN distances for random objects from within the database
      double w = computeNNForRealData(intQuery, relation, dim);
      // Compute NN distances for randomly created new uniform objects
      double u = computeNNForUniformData(knnQuery, min, extend);
      // compute the Hopkins statistic
      double h = u / (u + w); // = a / (1+a)
      hmean.put(h);
      umean.put(u);
      wmean.put(w);
    }
    final String prefix = this.getClass().getName();
    LOG.statistics(new LongStatistic(prefix + ".samplesize", sampleSize));
    LOG.statistics(new LongStatistic(prefix + ".dim", dim));
    LOG.statistics(new LongStatistic(prefix + ".hopkins.nearest-neighbor", k));
    LOG.statistics(new DoubleStatistic(prefix + ".hopkins.h.mean", hmean.getMean()));
    LOG.statistics(new DoubleStatistic(prefix + ".hopkins.u.mean", umean.getMean()));
    LOG.statistics(new DoubleStatistic(prefix + ".hopkins.w.mean", wmean.getMean()));
    // Standard deviations are only meaningful with more than one repetition.
    if(rep > 1) {
      LOG.statistics(new DoubleStatistic(prefix + ".hopkins.h.std", hmean.getSampleStddev()));
      LOG.statistics(new DoubleStatistic(prefix + ".hopkins.u.std", umean.getSampleStddev()));
      LOG.statistics(new DoubleStatistic(prefix + ".hopkins.w.std", wmean.getSampleStddev()));
    }
    // Evaluate:
    double x = hmean.getMean();
    // See Hopkins for a proof that x is supposedly Beta distributed.
    double ix = BetaDistribution.regularizedIncBeta(x, sampleSize, sampleSize);
    // Two-sided: take the tail probability of the closer tail.
    double p = (x > .5) ? (1. - ix) : ix;
    LOG.statistics(new DoubleStatistic(prefix + ".hopkins.p", p));
    return p;
  }

  /**
   * Search nearest neighbors for <em>real</em> data members.
   *
   * @param knnQuery KNN query
   * @param relation Data relation
   * @param dim Dimensionality
   * @return Aggregated 1NN distances
   */
  protected double computeNNForRealData(final KNNSearcher<DBIDRef> knnQuery, Relation<NumberVector> relation, final int dim) {
    // Draw a random subset of database objects to query with.
    final ModifiableDBIDs sample = DBIDUtil.randomSample(relation.getDBIDs(), sampleSize, random);
    double sum = 0.;
    for(DBIDIter it = sample.iter(); it.valid(); it.advance()) {
      // k+1, because each object is its own 0-distance nearest neighbor.
      sum += MathUtil.powi(knnQuery.getKNN(it, k + 1).getKNNDistance(), dim);
    }
    return sum;
  }

  /**
   * Search nearest neighbors for <em>artificial, uniform</em> data.
   *
   * @param knnQuery KNN query
   * @param min Data minima
   * @param extend Data extend
   * @return Aggregated 1NN distances
   */
  protected double computeNNForUniformData(final KNNSearcher<NumberVector> knnQuery, final double[] min, final double[] extend) {
    final Random rand = random.getSingleThreadedRandom();
    final int dim = min.length;
    final double[] vec = new double[dim];
    double sum = 0.;
    for(int i = 0; i < sampleSize; i++) {
      // Draw a uniform random vector from the bounding box.
      for(int d = 0; d < dim; d++) {
        vec[d] = min[d] + rand.nextDouble() * extend[d];
      }
      // Only k here: the artificial point is not a member of the database.
      sum += MathUtil.powi(knnQuery.getKNN(DoubleVector.wrap(vec), k).getKNNDistance(), dim);
    }
    return sum;
  }

  /**
   * Initialize the uniform sampling area.
   * <p>
   * Bounds may be given explicitly (one value per dimension, or a single
   * value applied to all dimensions); otherwise they are computed from the
   * data relation.
   *
   * @param relation Data relation
   * @param dim Dimensionality
   * @param min Minima output array (preallocated!)
   * @param extend Data extend output array (preallocated!)
   */
  protected void initializeDataExtends(Relation<NumberVector> relation, int dim, double[] min, double[] extend) {
    assert (min.length == dim && extend.length == dim);
    // No explicit bounds given: derive the bounding box from the data set.
    if(minima == null || maxima == null || minima.length == 0 || maxima.length == 0) {
      double[][] minmax = RelationUtil.computeMinMax(relation);
      final double[] dmin = minmax[0], dmax = minmax[1];
      for(int d = 0; d < dim; d++) {
        min[d] = dmin[d];
        extend[d] = dmax[d] - dmin[d];
      }
      return;
    }
    // Minima: a single value for all dimensions, or one value each.
    if(minima.length == 1) {
      Arrays.fill(min, minima[0]);
    }
    else if(minima.length == dim) {
      System.arraycopy(minima, 0, min, 0, dim);
    }
    else {
      throw new AbortException("Invalid minima specified: expected " + dim + " got minima dimensionality: " + minima.length);
    }
    // Maxima: same convention; validate before computing the extends.
    if(maxima.length != dim && maxima.length != 1) {
      throw new AbortException("Invalid maxima specified: expected " + dim + " got maxima dimensionality: " + maxima.length);
    }
    for(int d = 0; d < dim; d++) {
      extend[d] = (maxima.length == 1 ? maxima[0] : maxima[d]) - min[d];
    }
  }

  /**
   * Parameterization class.
   *
   * @author Lisa Reichert
   */
  public static class Par implements Parameterizer {
    /**
     * Sample size.
     */
    public static final OptionID SAMPLESIZE_ID = new OptionID("hopkins.samplesize", "Number of object / random samples to analyze.");

    /**
     * Parameter to specify the number of repetitions of computing the hopkins
     * value.
     */
    public static final OptionID REP_ID = new OptionID("hopkins.rep", "The number of times to repeat the experiment (default: 1)");

    /**
     * Parameter to specify the random generator seed.
     */
    public static final OptionID SEED_ID = new OptionID("hopkins.seed", "The random number generator.");

    /**
     * Parameter for minimum.
     */
    public static final OptionID MINIMA_ID = new OptionID("hopkins.min", "Minimum values in each dimension. If no value is specified, the minimum value in each dimension will be used. If only one value is specified, this value will be used for all dimensions.");

    /**
     * Parameter for maximum.
     */
    public static final OptionID MAXIMA_ID = new OptionID("hopkins.max", "Maximum values in each dimension. If no value is specified, the maximum value in each dimension will be used. If only one value is specified, this value will be used for all dimensions.");

    /**
     * Parameter for k.
     */
    public static final OptionID K_ID = new OptionID("hopkins.k", "Nearest neighbor to use for the statistic");

    /**
     * The distance function to use.
     */
    protected NumberVectorDistance<? super NumberVector> distance;

    /**
     * Sample size.
     */
    protected int sampleSize = 0;

    /**
     * Number of repetitions.
     */
    protected int rep = 1;

    /**
     * Nearest neighbor number.
     */
    protected int k = 1;

    /**
     * Random source.
     */
    protected RandomFactory random;

    /**
     * Stores the maximum in each dimension; {@code null} if not specified.
     */
    protected double[] maxima = null;

    /**
     * Stores the minimum in each dimension; {@code null} if not specified.
     */
    protected double[] minima = null;

    @Override
    public void configure(Parameterization config) {
      new ObjectParameter<NumberVectorDistance<? super NumberVector>>(Algorithm.Utils.DISTANCE_FUNCTION_ID, NumberVectorDistance.class, EuclideanDistance.class) //
          .grab(config, x -> distance = x);
      new IntParameter(REP_ID, 1) //
          .addConstraint(CommonConstraints.GREATER_EQUAL_ONE_INT) //
          .grab(config, x -> rep = x);
      new IntParameter(K_ID, 1) //
          .addConstraint(CommonConstraints.GREATER_EQUAL_ONE_INT) //
          .grab(config, x -> k = x);
      new IntParameter(SAMPLESIZE_ID) //
          .addConstraint(CommonConstraints.GREATER_EQUAL_ONE_INT) //
          .grab(config, x -> sampleSize = x);
      new RandomParameter(SEED_ID).grab(config, x -> random = x);
      // Minima may be omitted entirely (then derived from the data).
      DoubleListParameter minimaP = new DoubleListParameter(MINIMA_ID) //
          .setOptional(true);
      minimaP.grab(config, x -> minima = x.clone());
      // Maxima become mandatory once minima were given explicitly; this
      // relies on minimaP having been grabbed above.
      DoubleListParameter maximaP = new DoubleListParameter(MAXIMA_ID) //
          .setOptional(minima == null);
      maximaP.grab(config, x -> maxima = x.clone());
      // Non-formalized parameter constraint:
      if(minima != null && maxima != null && minima.length != maxima.length) {
        config.reportError(new WrongParameterValueException(minimaP, "and", maximaP, "must have the same number of values."));
      }
    }

    @Override
    public HopkinsStatisticClusteringTendency make() {
      return new HopkinsStatisticClusteringTendency(distance, sampleSize, random, rep, k, minima, maxima);
    }
  }
}
