package edu.ptit.jad.preprocess.analysis;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;

import edu.ptit.jad.similarity.CosineSimilarityMeasure;

/**
 * Computes TF-IDF term weights over a fixed set of documents and exposes
 * pairwise cosine similarity between them.
 *
 * Terms are whitespace-separated tokens. TF is the raw count of a term in a
 * document normalized by the document's maximum term count; IDF is the
 * natural log of (numDocs / docFreq). All matrices are built eagerly in the
 * constructor.
 */
public class TFIDFMeasure {
	/** The raw documents, one string per document. */
	private String[] _docs;

	private String[][] _ngramDoc;
	private int _numDocs = 0;
	private int _numTerms = 0;
	/** Distinct terms across all documents, in first-seen order. */
	private List<String> _terms;
	/** _termFreq[term][doc] = raw count of term in doc. */
	private int[][] _termFreq;
	/** _termWeight[term][doc] = tf * idf. */
	private float[][] _termWeight;
	/** _maxTermFreq[doc] = highest raw term count within doc. */
	private int[] _maxTermFreq;
	/** _docFreq[term] = number of documents containing term. */
	private int[] _docFreq;

	/** Maps each term to its row index in _termFreq/_termWeight. */
	private Map<String, Integer> _wordsIndex = new HashMap<String, Integer>();

	/**
	 * Builds the full TF-IDF model for the given documents.
	 *
	 * @param documents one string per document; tokens separated by spaces
	 */
	public TFIDFMeasure(String[] documents) {
		_docs = documents;
		_numDocs = documents.length;
		MyInit();
	}

	/**
	 * Counts how often each whitespace-separated token occurs in {@code text}.
	 *
	 * @return map from token to its raw occurrence count
	 */
	private Map<String, Integer> GetWordFrequency(String text) {
		Map<String, Integer> termFrequencyMap = new HashMap<String, Integer>();
		StringTokenizer tknr = new StringTokenizer(text, " ");
		while (tknr.hasMoreTokens()) {
			String term = tknr.nextToken();
			Integer frequency = termFrequencyMap.get(term);
			termFrequencyMap.put(term, frequency == null ? 1 : frequency + 1);
		}
		return termFrequencyMap;
	}

	// Placeholder for n-gram tokenization; intentionally left unimplemented.
	private void GeneratNgramText() {

	}

	/**
	 * Collects the distinct terms over all documents, preserving first-seen
	 * order. Uses a LinkedHashSet so membership checks are O(1) instead of the
	 * O(n) List.contains scan (the old ArrayList approach was O(n^2) overall).
	 */
	private List<String> GenerateTerms(String[] docs) {
		LinkedHashSet<String> uniques = new LinkedHashSet<String>();
		_ngramDoc = new String[_numDocs][];
		for (int i = 0; i < docs.length; i++) {
			StringTokenizer tknr = new StringTokenizer(docs[i], " ");
			while (tknr.hasMoreTokens()) {
				uniques.add(tknr.nextToken());
			}
		}
		return new ArrayList<String>(uniques);
	}

	/**
	 * Returns the row index of {@code term}, or -1 if the term is unknown.
	 * (The previous version auto-unboxed the lookup and threw a
	 * NullPointerException for unknown terms.)
	 */
	private int GetTermIndex(String term) {
		Integer index = _wordsIndex.get(term);
		return index == null ? -1 : index;
	}

	/** Allocates all matrices and builds the frequency and weight tables. */
	private void MyInit() {
		_terms = GenerateTerms(_docs);
		_numTerms = _terms.size();

		_maxTermFreq = new int[_numDocs];
		_docFreq = new int[_numTerms];
		_termFreq = new int[_numTerms][];
		_termWeight = new float[_numTerms][];

		for (int i = 0; i < _numTerms; i++) {
			_termWeight[i] = new float[_numDocs];
			_termFreq[i] = new int[_numDocs];
			_wordsIndex.put(_terms.get(i), i);
		}

		GenerateTermFrequency();
		GenerateTermWeight();
	}

	/** Natural logarithm (base e). */
	private float Log(float num) {
		return (float) Math.log(num);
	}

	/**
	 * Fills _termFreq, _docFreq and _maxTermFreq from the documents.
	 *
	 * BUG FIX: the old code overwrote _maxTermFreq[i] with Integer.MIN_VALUE
	 * AFTER computing the maximum, which corrupted every TF value (division by
	 * MIN_VALUE). The computed maximum is now kept.
	 */
	private void GenerateTermFrequency() {
		for (int i = 0; i < _numDocs; i++) {
			Map<String, Integer> freq = GetWordFrequency(_docs[i]);

			for (Map.Entry<String, Integer> e : freq.entrySet()) {
				String word = e.getKey();
				int wordFreq = e.getValue();
				int termIndex = GetTermIndex(word);
				if (termIndex < 0) {
					continue; // term not in vocabulary; should not happen for _docs
				}

				_termFreq[termIndex][i] = wordFreq;
				_docFreq[termIndex]++; // each word appears once per freq map

				if (wordFreq > _maxTermFreq[i]) {
					_maxTermFreq[i] = wordFreq;
				}
			}
		}
	}

	/** Fills _termWeight with tf * idf for every (term, doc) pair. */
	private void GenerateTermWeight() {
		for (int i = 0; i < _numTerms; i++) {
			for (int j = 0; j < _numDocs; j++) {
				_termWeight[i][j] = ComputeTermWeight(i, j);
			}
		}
	}

	/**
	 * Max-normalized term frequency: count(term, doc) / maxCount(doc).
	 * Returns 0 for an empty document to avoid division by zero.
	 */
	private float GetTermFrequency(int term, int doc) {
		int freq = _termFreq[term][doc];
		int maxfreq = _maxTermFreq[doc];
		if (maxfreq == 0) {
			return 0f;
		}
		return (float) freq / (float) maxfreq;
	}

	/** IDF = ln(numDocs / docFreq). docFreq >= 1 for every known term. */
	private float GetInverseDocumentFrequency(int term) {
		int df = _docFreq[term];
		return Log((float) _numDocs / (float) df);
	}

	/** TF-IDF weight of {@code term} in {@code doc}. */
	private float ComputeTermWeight(int term, int doc) {
		float tf = GetTermFrequency(term, doc);
		float idf = GetInverseDocumentFrequency(term);
		return tf * idf;
	}

	/** The weight column for one document, indexed by term. */
	private float[] GetTermVector(int doc) {
		float[] w = new float[_numTerms];
		for (int i = 0; i < _numTerms; i++) {
			w[i] = _termWeight[i][doc];
		}
		return w;
	}

	/**
	 * Cosine similarity between the TF-IDF vectors of two documents.
	 *
	 * @param doc_i index of the first document
	 * @param doc_j index of the second document
	 */
	public double getSimilarity(int doc_i, int doc_j) {
		float[] vector1 = GetTermVector(doc_i);
		float[] vector2 = GetTermVector(doc_j);
		return new CosineSimilarityMeasure().calculate(vector1, vector2);
	}

	/**
	 * Returns the distinct strings of {@code input} in first-seen order.
	 *
	 * BUG FIX: the old code cast the result of the no-arg toArray() — which
	 * returns Object[] — to String[], throwing ClassCastException. The typed
	 * toArray(new String[0]) overload is used instead.
	 */
	private String[] GetDistinctWords(String[] input) {
		if (input == null) {
			return new String[0];
		}
		LinkedHashSet<String> distinct = new LinkedHashSet<String>(Arrays.asList(input));
		return distinct.toArray(new String[0]);
	}

	/**
	 * Counts how many elements of {@code words} equal {@code word}.
	 *
	 * The previous implementation used Arrays.binarySearch plus a fragile
	 * rewind/forward scan, which only worked on sorted input. A linear scan is
	 * correct for any input and no slower in the worst case.
	 */
	private int CountWords(String word, String[] words) {
		int count = 0;
		for (String w : words) {
			if (w.equals(word)) {
				count++;
			}
		}
		return count;
	}

}
