/**
 * Copyright 2011 Brigham Young University
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.byu.nlp.ml;

import org.apache.commons.math3.analysis.DifferentiableMultivariateFunction;
import org.apache.commons.math3.analysis.MultivariateFunction;
import org.apache.commons.math3.analysis.MultivariateVectorFunction;
import org.apache.commons.math3.exception.DimensionMismatchException;
import org.apache.commons.math3.exception.OutOfRangeException;
import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.RealVector;

/**
 * Factory methods for creating common regularizers.
 * 
 * @author rah67
 *
 */
public class Regularizers {

	// Uninstantiable
	private Regularizers() {}

	/**
	 * L1 (lasso) regularizer: value(weights) = \sum_i |weights[i]|.
	 * 
	 * Although the derivative is undefined at zero, we return 0 which is in the subderivative.
	 */
	private static class L1Regularizer implements DifferentiableMultivariateFunction {

		@Override
		public double value(double[] weights) {
			// Wrap without copying (second arg false); getL1Norm() computes \sum_i |weights[i]|.
			return new ArrayRealVector(weights, false).getL1Norm();
		}

		@Override
		public MultivariateVectorFunction gradient() {
			return new MultivariateVectorFunction() {
				@Override
				public double[] value(double[] weights) {
					double[] gradient = new double[weights.length];
					for (int i = 0; i < gradient.length; i++) {
						// Technically, the derivative is undefined at zero; signum returns 0 there,
						// which lies in the subderivative [-1, 1].
						gradient[i] = Math.signum(weights[i]);
					}
					return gradient;
				}
			};
		}

		@Override
		public MultivariateFunction partialDerivative(final int i) {
			return new MultivariateFunction() {
				@Override
				public double value(double[] weights) {
					// Technically, the derivative is undefined at zero.
					return Math.signum(weights[i]);
				}
			};
		}
	}

	/**
	 * Returns an L1 (lasso) regularizer, i.e. the L1 norm of the weight vector.
	 */
	public static DifferentiableMultivariateFunction l1Regularizer() {
		return new L1Regularizer();
	}

	/**
	 * L2 (ridge) regularizer: value(weights) = \sum_i weights[i]^2 (no square root).
	 */
	private static class L2Regularizer implements DifferentiableMultivariateFunction {

		@Override
		public double value(double[] weights) {
			// Not using new ArrayRealVector(weights, false).getNorm() in order to avoid square root and subsequent
			// square
			double norm = 0.0;
			for (double d : weights) {
				norm += d * d;
			}
			return norm;
		}

		@Override
		public MultivariateVectorFunction gradient() {
			return new MultivariateVectorFunction() {
				/**
				 * d/dw_i \sum_j weights[j]^2 = 2 * weights[i]
				 */
				@Override
				public double[] value(double[] weights) {
					double[] gradient = new double[weights.length];
					for (int i = 0; i < gradient.length; i++) {
						gradient[i] = 2.0 * weights[i];
					}
					return gradient;
				}
			};
		}

		@Override
		public MultivariateFunction partialDerivative(final int i) {
			return new MultivariateFunction() {
				@Override
				public double value(double[] weights) {
					return 2.0 * weights[i];
				}
			};
		}
	}

	/**
	 * Returns an L2 (ridge) regularizer, i.e. the squared L2 norm of the weight vector.
	 */
	public static DifferentiableMultivariateFunction l2Regularizer() {
		// Not re-using the more general CenteredL2Regularizer or IndependentGaussiansRegularizer has two advantages:
		// First and foremost, we don't have to pre-specify a dimension for the mean vector, since L2 regularization can
		// take place on any size vector. Second, there are slight efficiency benefits, though these may be minimal.
		return new L2Regularizer();
	}

	/**
	 * L2 regularizer centered at an arbitrary vector: value(weights) = \sum_i (weights[i] - center[i])^2.
	 */
	private static class CenteredL2Regularizer implements DifferentiableMultivariateFunction {

		private final RealVector center;

		public CenteredL2Regularizer(RealVector center) {
			// Takes ownership of center; see centeredL2Regularizer(RealVector).
			this.center = center;
		}

		/**
		 * \sum_i (weights[i] - center[i])^2
		 * 
		 * @throws DimensionMismatchException if weights.length != center.getDimension()
		 */
		@Override
		public double value(double[] weights) {
			// Euclidean distance is symmetric, so we use center because it may have been overridden for efficiency.
			double distance = center.getDistance(new ArrayRealVector(weights, false));
			// If we did things manually, we could avoid the square root from center.getDistance(weights) and this
			// subsequent square, at the expense of calling center.getEntry(i) or center.iterator(). This way the
			// efficiency is entirely up to the underlying RealVector implementation. Besides, the extra square root
			// and square are not likely to show up in a profiler; if they do, we can optimize later.
			return distance * distance;
		}

		@Override
		public MultivariateVectorFunction gradient() {
			return new MultivariateVectorFunction() {

				/**
				 * d/dw_i \sum_j (weights[j] - center[j])^2 = d/dw_i (weights[i]^2 - 2*weights[i]*center[i])
				 *                                          = 2 * weights[i] - 2 * center[i]
				 *                                  
				 * @throws DimensionMismatchException if weights.length != center.getDimension()
				 */
				@Override
				public double[] value(double[] weights) {
					// Enforce the documented contract explicitly. Without this check, a weights
					// vector shorter than center silently produced a truncated gradient, and a
					// longer one threw OutOfRangeException from center.getEntry(i) instead of the
					// documented DimensionMismatchException.
					if (weights.length != center.getDimension()) {
						throw new DimensionMismatchException(weights.length, center.getDimension());
					}
					double[] gradient = new double[weights.length];
					for (int i = 0; i < gradient.length; i++) {
						gradient[i] = 2.0 * (weights[i] - center.getEntry(i));
					}
					return gradient;
				}
			};
		}

		/**
		 * {@inheritDoc}
		 * 
		 * @throws OutOfRangeException if i >= center.getDimension().
		 */
		@Override
		public MultivariateFunction partialDerivative(final int i) {
			// Hoist the (immutable) mean out of the returned closure; also makes an invalid i
			// fail fast here rather than on the first evaluation.
			final double mean = center.getEntry(i);
			return new MultivariateFunction() {

				/**                                  
				 * @throws DimensionMismatchException if weights.length != center.getDimension()
				 */
				@Override
				public double value(double[] weights) {
					if (weights.length != center.getDimension()) {
						throw new DimensionMismatchException(weights.length, center.getDimension());
					}
					return 2.0 * (weights[i] - mean);
				}
			};
		}
	}

	/**
	 * An L2 regularizer centered at a value rather than 0. Can be thought of as a prior consisting of independent
	 * Gaussians with means equivalent to the specified component of center.
	 * 
	 * Note that at runtime, if a weight vector is provided of different dimensions than center, a
	 * DimensionMismatchException will be thrown.
	 * 
	 * The returned class assumes ownership of the center vector, i.e. does not copy it. Normally, it shouldn't be
	 * changed after this call, especially in a multi-threaded environment.
	 */
	public static DifferentiableMultivariateFunction centeredL2Regularizer(RealVector center) {
		return new CenteredL2Regularizer(center);
	}

	/**
	 * Regularizer equivalent to a per-component Gaussian prior:
	 * value(weights) = \sum_i (weights[i] - means[i])^2 / variances[i].
	 */
	private static class IndependentGaussianRegularizer implements DifferentiableMultivariateFunction {

		private final RealVector means;
		private final RealVector variances;

		public IndependentGaussianRegularizer(RealVector means, RealVector variances) {
			// Takes ownership of both vectors; see independentGaussiansRegularizer(RealVector, RealVector).
			this.means = means;
			this.variances = variances;
		}

		/**
		 * \sum_i (weights[i] - means[i])^2 / variances[i]
		 * 
		 * @throws DimensionMismatchException if weights.length != means.getDimension (also variances.getDimension()) 
		 */
		@Override
		public double value(double[] weights) {
			if (weights.length != means.getDimension()) {
				throw new DimensionMismatchException(weights.length, means.getDimension());
			}

			double value = 0.0;
			for (int i = 0; i < weights.length; i++) {
				double diff = weights[i] - means.getEntry(i);
				value += diff * diff / (variances.getEntry(i));
			}
			return value;
		}

		@Override
		public MultivariateVectorFunction gradient() {
			return new MultivariateVectorFunction() {

				/**
				 * d/dw_i \sum_j (weights[j] - means[j])^2 / variances[j]
				 *     = d/dw_i (weights[i]^2 - 2*weights[i]*means[i]) / variances[i]
				 *     = (2*weights[i] - 2*means[i]) / variances[i]
				 *     = 2 * (weights[i] - means[i]) / variances[i]
				 *                                  
				 * @throws DimensionMismatchException if weights.length != means.getDimension()
				 */
				@Override
				public double[] value(double[] weights) {
					// Enforce the documented contract explicitly (mirrors value(double[]) above).
					// Without this check, a short weights vector silently produced a truncated
					// gradient and a long one threw OutOfRangeException instead.
					if (weights.length != means.getDimension()) {
						throw new DimensionMismatchException(weights.length, means.getDimension());
					}
					double[] gradient = new double[weights.length];
					for (int i = 0; i < gradient.length; i++) {
						gradient[i] = 2.0 * (weights[i] - means.getEntry(i)) / variances.getEntry(i);
					}
					return gradient;
				}
			};
		}

		/**
		 * {@inheritDoc}
		 * 
		 * @throws OutOfRangeException if i >= means.getDimension().
		 */
		@Override
		public MultivariateFunction partialDerivative(final int i) {
			// Hoist the (immutable) parameters out of the returned closure; also makes an
			// invalid i fail fast here rather than on the first evaluation.
			final double mean = means.getEntry(i);
			final double variance = variances.getEntry(i);
			return new MultivariateFunction() {

				/**                                  
				 * @throws DimensionMismatchException if weights.length != means.getDimension()
				 */
				@Override
				public double value(double[] weights) {
					if (weights.length != means.getDimension()) {
						throw new DimensionMismatchException(weights.length, means.getDimension());
					}
					return 2.0 * (weights[i] - mean) / variance;
				}
			};
		}
	}

	/**
	 * Returns a regularizer that treats each weight as if it had an independent Gaussian prior with its own mean and
	 * variance. When the priors share a variance, this is equivalent to centeredL2Regularizer(mean); if in addition the
	 * means are all zero, then this is equivalent to l2Regularizer(). Note that the variance is NOT multiplied by two
	 * as per the pdf for the Gaussian; if this is desired, set alpha to 2.0.
	 * 
	 * Note that at runtime, if a weight vector is provided of different dimensions than means, a
	 * DimensionMismatchException will be thrown from any of the methods of the returned
	 * {@code DifferentiableMultivariateFunction}.
	 * 
	 * The returned class assumes ownership of the mean and variance vectors, i.e. does not copy them. Normally, they 
	 * shouldn't be changed after this call, especially in a multi-threaded environment.
	 * 
	 * @throws DimensionMismatchException if the dimensions of means and variances differ
	 */
	public static DifferentiableMultivariateFunction independentGaussiansRegularizer(
			RealVector means, RealVector variances) {
		if (means.getDimension() != variances.getDimension()) {
			throw new DimensionMismatchException(variances.getDimension(), means.getDimension());
		}
		return new IndependentGaussianRegularizer(means, variances);
	}
}
