/*
 * Copyright 2008 FBK (http://www.fbk.eu/)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.fbk.it.hlt.jlsi;

import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.Iterator;
import java.util.Map;

import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;
import org.fbk.it.hlt.jlsi.data.SparseBinaryMatrixFileReader;

import cern.colt.matrix.DoubleMatrix1D;
import cern.colt.matrix.DoubleMatrix2D;
import cern.colt.matrix.impl.DenseDoubleMatrix1D;

/**
 * Term similarity in the VSM using the cosine.
 * 
 * @author Claudio Giuliano
 * @version %I%, %G%
 * @since 1.0
 * @see CosineTermSimilarity
 */
public class CosineTermSimilarity extends AbstractTermSimilarity {
	/**
	 * Define a static logger variable so that it references the Logger instance
	 * named <code>CosineTermSimilarity</code>.
	 */
	static Logger logger = Logger.getLogger(CosineTermSimilarity.class
			.getName());

	/**
	 * Term-by-document matrix; rows correspond to terms, columns to documents.
	 */
	private DoubleMatrix2D X;

	/**
	 * Inverse-document-frequency vector; one entry per term index, holding
	 * log(l / df) for that term.
	 */
	private DoubleMatrix1D Iidf;

	// maps terms to row indexes of X
	private TermIndex termIndex;

	// maps documents to column indexes of X
	private DocumentIndex documentIndex;

	// minimum score a term pair must exceed to be reported by compareAll
	private double threshold;

	// maximum number of similar terms kept in a ScoreTermMap
	private int size;

	/**
	 * Constructs a <code>CosineTermSimilarity</code> object.
	 *
	 * @param XFile     term-by-document matrix in sparse binary format
	 * @param rowFile   row (term) index file
	 * @param colFile   column (document) index file
	 * @param dfFile    term document-frequency file
	 * @param threshold minimum similarity score to report
	 * @param size      maximum number of similar terms to return
	 * @throws IOException if any of the input files cannot be read
	 */
	public CosineTermSimilarity(File XFile, File rowFile, File colFile,
			File dfFile, double threshold, int size) throws IOException {
		this.threshold = threshold;
		this.size = size;
		termIndex = TermIndex.getTermIndex();
		termIndex.read(new FileReader(rowFile));

		documentIndex = DocumentIndex.getDocumentIndex();
		documentIndex.read(new FileReader(colFile));

		// number of documents - documentSet returns the set of keys
		int l = documentIndex.documentSet().size();

		Vocabulary voc = new Vocabulary();
		voc.read(new FileReader(dfFile));
		createIidf(voc, l);

		SparseBinaryMatrixFileReader matrixFileReader = new SparseBinaryMatrixFileReader(
				XFile);
		X = (DoubleMatrix2D) matrixFileReader.read();
		// unit-length rows first, then idf scaling
		normalize();
		idf();
	} // end constructor

	/**
	 * Creates the 1-D idf matrix. The dimension equals the number of terms in
	 * the vocabulary; each position in the vector corresponds to the
	 * term-index and each value is log(l / df), where l is the total number of
	 * documents and df is the term's document frequency.
	 *
	 * @param voc vocabulary mapping terms to their document frequencies
	 * @param l   total number of documents in the collection
	 */
	private void createIidf(Vocabulary voc, int l) {
		// DenseDoubleMatrix1D is an array of type double
		Iidf = new DenseDoubleMatrix1D(voc.entrySet().size());
		// iterates over the types
		Iterator it = voc.entrySet().iterator();
		while (it.hasNext()) {
			Map.Entry me = (Map.Entry) it.next();
			String term = (String) me.getKey();

			Vocabulary.TermFrequency tf = (Vocabulary.TermFrequency) me
					.getValue();
			int index = termIndex.getIndex(term);
			// log(l / df): cast BEFORE dividing. The original code cast the
			// result of the integer division (l / tf.get()), which truncated
			// the ratio and produced inaccurate idf weights.
			Iidf.setQuick(index, Math.log((double) l / tf.get()));
			// NOTE(review): assumes every vocabulary term is present in
			// termIndex (getIndex >= 0) - confirm against the index files.
		} // end while

	} // end createIidf

	/**
	 * Scales each row of the term-by-document matrix to unit Euclidean
	 * length: for each row it calculates the square root of the sum of
	 * squares of its elements and divides each element by it.
	 */
	private void normalize() {
		logger.debug("normalize: Iidf[" + Iidf.size() + " X " + Iidf.size()
				+ "] * X[" + X.rows() + " X " + X.columns() + "]");

		// X.rows = N
		for (int i = 0; i < X.rows(); i++) {
			DoubleMatrix1D row = X.viewRow(i);

			double sum = 0;
			for (int j = 0; j < row.size(); j++)
				sum += Math.pow(row.getQuick(j), 2);

			// normalization; guard against an all-zero row, which would
			// otherwise produce 0/0 = NaN entries that poison every
			// subsequent comparison - such rows are left as zeros.
			sum = Math.sqrt(sum);
			if (sum > 0) {
				for (int j = 0; j < row.size(); j++)
					row.setQuick(j, row.getQuick(j) / sum);
			}
		} // end for i

	} // end normalize

	/**
	 * Scales each (already normalized) row i of X by the idf weight of term
	 * i, i.e. computes I^idf x X.
	 */
	private void idf() {
		logger.debug("idf: Iidf[" + Iidf.size() + " X " + Iidf.size()
				+ "] * X[" + X.rows() + " X " + X.columns() + "]");

		// X.rows = N
		for (int i = 0; i < X.rows(); i++) {
			DoubleMatrix1D row = X.viewRow(i);
			// row i belongs to term i, so the whole row is scaled by the
			// idf weight of term i
			for (int j = 0; j < row.size(); j++)
				row.setQuick(j, row.getQuick(j) * Iidf.getQuick(i));

		} // end for i

	} // end idf

	/**
	 * Returns the cosine similarity between the row vectors of the two given
	 * terms.
	 *
	 * @param t1 first term
	 * @param t2 second term
	 * @return the similarity score
	 */
	public double compare(String t1, String t2) {
		int i1 = termIndex.getIndex(t1);
		DoubleMatrix1D x1 = X.viewRow(i1);

		int i2 = termIndex.getIndex(t2);
		DoubleMatrix1D x2 = X.viewRow(i2);

		double dot = cosine(x1, x2);
		System.out.println(t1 + " * " + t2 + " = " + dot);

		return dot;
	} // end compare

	/**
	 * Compares each of the given terms against the whole term index.
	 *
	 * @param terms the input terms
	 * @return one score map per input term, in the same order
	 */
	public ScoreTermMap[] compareAll(String[] terms) {
		System.out.println("compareAll");
		ScoreTermMap[] result = new ScoreTermMap[terms.length];

		for (int i = 0; i < terms.length; i++) {
			result[i] = compareAll(terms[i]);

		} // end for

		return result;
	} // end compareAll

	/**
	 * Compares the given term against every term in the index, keeping only
	 * scores above the threshold.
	 *
	 * @param t the input term
	 * @return the scores of the most similar terms (at most <code>size</code>)
	 */
	public ScoreTermMap compareAll(String t) {
		System.out.println("compare all to " + t);
		ScoreTermMap map = new ScoreTermMap(t, size);

		Iterator it = termIndex.termSet().iterator();
		while (it.hasNext()) {
			String t2 = (String) it.next();

			double dot = compare(t, t2);

			if (dot > threshold) {
				map.put(dot, t2);
			}
		} // end for i

		return map;
	} // end compareAll

	//
	public static void main(String[] args) throws Exception {
		String logConfig = System.getProperty("log-config");
		if (logConfig == null)
			logConfig = "log-config.txt";

		long begin = System.currentTimeMillis();

		PropertyConfigurator.configure(logConfig);

		if (args.length < 4) {
			System.out.println(getHelp());
			System.exit(1);
		}

		// the four input files share the root given in args[0]
		File X = new File(args[0] + "-matrix");
		File r = new File(args[0] + "-row");
		File c = new File(args[0] + "-col");
		File df = new File(args[0] + "-df");
		double threshold = Double.parseDouble(args[1]);
		int size = Integer.parseInt(args[2]);

		String[] terms = new String[args.length - 3];

		for (int i = 0; i < terms.length; i++)
			terms[i] = args[i + 3];

		TermSimilarity ts = new CosineTermSimilarity(X, r, c, df, threshold,
				size);
		ScoreTermMap map = ts.compareAll(terms[0]);
		System.out.println(map);

		long end = System.currentTimeMillis();
		System.out.println("term similarity calculated in " + (end - begin)
				+ " ms");
	} // end main

	/**
	 * Returns a command-line help.
	 *
	 * @return a command-line help
	 */
	private static String getHelp() {
		StringBuffer sb = new StringBuffer();

		// License
		sb.append(License.get());

		// Usage
		sb
				.append("Usage: java -mx1024M org.fbk.it.hlt.jlsi.CosineTermSimilarity input threshold size (term)+\n\n");

		// Arguments
		sb.append("Arguments:\n");
		sb
				.append("\tinput\t\t-> root of files from which to read the term-by-document matrix (in sparse binary format), row index, col index and term document frequency\n");
		// the scores under the threshold are not displayed
		sb.append("\tthreshold\t-> similarity threshold\n");
		sb.append("\tsize\t\t-> number of similar terms to return\n");
		sb.append("\tterm\t\t-> input term\n");

		return sb.toString();
	} // end getHelp

} // end class CosineTermSimilarity