package cn.jathlon.ml;

import java.util.Map;

/**
 * Some similarity method.
 * 
 * @author tuxiao.czz
 * 
 */
public class Similarity {

	/**
	 * Computes the cosine similarity of two equal-length vectors:
	 * dot(v1, v2) / (|v1| * |v2|).
	 * 
	 * @param v1 first vector
	 * @param v2 second vector, same length as {@code v1}
	 * @return value in [-1, 1]; 0.0 when either vector has zero magnitude
	 *         (a plain division would yield NaN)
	 */
	public static double cosineSimilarity(double[] v1, double[] v2) {
		double denominator = sqrtMulti(v1, v2);
		// Guard against division by zero for zero-magnitude vectors.
		if (denominator == 0.0) {
			return 0.0;
		}
		return pointMulti(v1, v2) / denominator;
	}

	/**
	 * Product of the two vectors' Euclidean norms: |v1| * |v2|.
	 */
	private static double sqrtMulti(double[] v1, double[] v2) {
		return Math.sqrt(squares(v1) * squares(v2));
	}

	/**
	 * Sum of squares of the vector's components (squared Euclidean norm).
	 */
	private static double squares(double[] v) {
		double result = 0;
		// Iterate with the primitive type; the original boxed each element
		// into a Double for no benefit.
		for (double d : v) {
			result += d * d;
		}
		return result;
	}

	/**
	 * Dot product of two vectors.
	 * 
	 * @param v1 first vector
	 * @param v2 second vector; must be at least as long as {@code v1}
	 * @return sum over i of v1[i] * v2[i]
	 */
	public static double pointMulti(double[] v1, double[] v2) {
		double result = 0;
		for (int i = 0; i < v1.length; i++) {
			result += v1[i] * v2[i];
		}
		return result;
	}

	/**
	 * Pearson correlation coefficient of two equal-length samples, computed
	 * in one pass with incrementally updated (Welford-style) means.
	 * 
	 * @param scores1 first sample, at least one element
	 * @param scores2 second sample, same length as {@code scores1}
	 * @return correlation in [-1, 1]; NaN when either sample is constant
	 *         (population standard deviation of zero)
	 */
	public static double pearsonSimilarity(double[] scores1, double[] scores2) {
		double sum_sq_x = 0;
		double sum_sq_y = 0;
		double sum_coproduct = 0;
		double mean_x = scores1[0];
		double mean_y = scores2[0];
		// i is the 1-based count of elements seen so far.
		for (int i = 2; i <= scores1.length; i++) {
			// Primitive cast forces floating division; the original used
			// Double.valueOf, which boxed a Double on every iteration.
			double sweep = (double) (i - 1) / i;
			double delta_x = scores1[i - 1] - mean_x;
			double delta_y = scores2[i - 1] - mean_y;
			sum_sq_x += delta_x * delta_x * sweep;
			sum_sq_y += delta_y * delta_y * sweep;
			sum_coproduct += delta_x * delta_y * sweep;
			mean_x += delta_x / i;
			mean_y += delta_y / i;
		}
		// Population (not sample) statistics: divide by n, not n - 1.
		double pop_sd_x = Math.sqrt(sum_sq_x / scores1.length);
		double pop_sd_y = Math.sqrt(sum_sq_y / scores1.length);
		double cov_x_y = sum_coproduct / scores1.length;
		return cov_x_y / (pop_sd_x * pop_sd_y);
	}

	/**
	 * Increments the counter stored under {@code key}, starting at 1.0 for an
	 * absent (or null-valued) key. Mutates and returns the given map.
	 * 
	 * @param map counter map; modified in place
	 * @param key counter to increment
	 * @return the same map instance, for chaining
	 */
	public static Map<String, Double> mapValuePlusPlus(Map<String, Double> map,
			String key) {
		// Single lookup instead of containsKey + get; also avoids the NPE
		// the original threw on a present-but-null value.
		Double current = map.get(key);
		map.put(key, current == null ? 1.0 : current + 1.0);
		return map;
	}

	// gsc
	/**
	 * Returns the KL divergence, K(p1 || p2).
	 * 
	 * The log is w.r.t. base 2.
	 * <p>
	 * 
	 * *Note*: If any value in <tt>p2</tt> is <tt>0.0</tt> (where the
	 * corresponding <tt>p1</tt> value is nonzero) then the KL-divergence is
	 * <tt>infinite</tt>. Terms with <tt>p1[i] == 0</tt> contribute zero by
	 * convention.
	 * 
	 * @param p1 first distribution
	 * @param p2 second distribution, same length as {@code p1}
	 */
	public static double klDivergence(double[] p1, double[] p2) {
		assert (p1.length == p2.length);
		double klDiv = 0.0;
		for (int i = 0; i < p1.length; ++i) {
			if (p1[i] == 0) {
				continue; // 0 * log(0 / q) == 0 by convention
			}
			if (p2[i] == 0) {
				return Double.POSITIVE_INFINITY;
			}
			klDiv += p1[i] * Math.log(p1[i] / p2[i]);
		}
		// Convert from natural log to base 2 once, outside the loop.
		return klDiv / Math.log(2);
	}

	/**
	 * Returns the Jensen-Shannon divergence: the mean of the KL divergences
	 * of each distribution from their pointwise average. Symmetric and, with
	 * base-2 logs, bounded in [0, 1].
	 * 
	 * @param p1 first distribution
	 * @param p2 second distribution, same length as {@code p1}
	 */
	public static double jensenShannonDivergence(double[] p1, double[] p2) {
		assert (p1.length == p2.length);
		double[] average = new double[p1.length];
		for (int i = 0; i < p1.length; ++i) {
			// Plain assignment; the original's += relied on the zero-filled
			// fresh array and obscured the intent.
			average[i] = (p1[i] + p2[i]) / 2;
		}
		return (klDivergence(p1, average) + klDivergence(p2, average)) / 2;
	}

}
