package ca.uwindsor.cs.deepweb.estimation.method.border;

import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map.Entry;
import java.util.Set;

import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.search.Hits;

/**
 * @author Liang Jie
 *
 */
/**
 * A query/document incidence matrix used for deep-web size estimation.
 * Rows are queries, columns are document ids; from the matrix this class
 * derives per-document and per-query weights.
 *
 * <p>Not thread-safe.
 *
 * @author Liang Jie
 */
public class MatrixNew {

	/** Name of the Lucene stored field that holds the document identifier. */
	protected String Field;

	/** All distinct document ids seen across every query added so far. */
	protected Set<String> uniqueid;

	/** The matrix: query string -> set of document ids matched by that query. */
	protected HashMap<String, Set<String>> m;

	/**
	 * Document id -> number of queries whose result set contains that document.
	 * Populated by {@link #calculateInverseDocumentWeight()}.
	 */
	protected HashMap<String, Integer> inverseDocumentWeight;

	/**
	 * Query -> weight, where the weight is the sum over the query's documents
	 * of 1 / (that document's inverse document weight).
	 * Populated by {@link #calculateQueryWeight()}.
	 */
	protected HashMap<String, Float> QueryWeight;

	/**
	 * Creates an empty matrix.
	 *
	 * @param FieldName       name of the stored field carrying the document id
	 * @param initialCapacity initial capacity hint for the internal collections
	 */
	public MatrixNew(String FieldName, int initialCapacity) {
		uniqueid = new HashSet<String>(initialCapacity);
		Field = FieldName;
		m = new HashMap<String, Set<String>>(initialCapacity);
		inverseDocumentWeight = new HashMap<String, Integer>(initialCapacity);
		QueryWeight = new HashMap<String, Float>(initialCapacity);
	}

	/**
	 * Adds a query and its Lucene hits to the matrix, extracting the document
	 * id of each hit from the configured field.
	 *
	 * @param q    the query string (row key)
	 * @param hits Lucene search results for the query
	 * @throws CorruptIndexException if the underlying index is corrupt
	 * @throws IOException           if reading a hit document fails
	 * @deprecated the Lucene {@code Hits} API is obsolete; prefer
	 *             {@link #addQuery(String, Set)}
	 */
	public void addQuery(String q, Hits hits) throws CorruptIndexException, IOException {
		int length = hits.length();
		Set<String> documents = new HashSet<String>(length);
		for (int index = 0; index < length; index++) {
			String id = hits.doc(index).get(Field);
			documents.add(id);
			uniqueid.add(id);
		}
		m.put(q, documents);
	}

	/**
	 * Adds a query and its matching document ids to the matrix.
	 * The given set is stored by reference, not copied.
	 *
	 * @param q         the query string (row key)
	 * @param documents ids of the documents matched by the query
	 */
	public void addQuery(String q, Set<String> documents) {
		uniqueid.addAll(documents);
		m.put(q, documents);
	}

	/**
	 * Computes, for every document id in the matrix, the number of queries
	 * whose result set contains it.
	 *
	 * <p>NOTE: counts accumulate into the existing map; calling this method
	 * twice without clearing doubles the weights.
	 */
	public void calculateInverseDocumentWeight() {
		for (Entry<String, Set<String>> e : m.entrySet()) {
			for (String docid : e.getValue()) {
				// Single lookup instead of containsKey + get + put.
				Integer count = inverseDocumentWeight.get(docid);
				inverseDocumentWeight.put(docid, count == null ? 1 : count + 1);
			}
		}
	}

	/**
	 * Computes the weight of every query as the sum of the reciprocals of its
	 * documents' inverse document weights. Documents without an entry in
	 * {@link #inverseDocumentWeight} contribute nothing, so
	 * {@link #calculateInverseDocumentWeight()} should be called first.
	 */
	public void calculateQueryWeight() {
		for (Entry<String, Set<String>> e : m.entrySet()) {
			float weight = 0;
			String query = e.getKey();
			for (String docid : e.getValue()) {
				Integer idw = inverseDocumentWeight.get(docid);
				if (idw != null) {
					weight += 1 / idw.floatValue();
				}
			}
			QueryWeight.put(query, weight);
		}
	}

	/**
	 * @param term a query/term (row key)
	 * @return the number of documents matched by the term, or 0 if the term
	 *         is not in the matrix
	 */
	public int getDocumentFrequencybyTerm(String term) {
		Set<String> s = m.get(term);
		return s != null ? s.size() : 0;
	}

	/**
	 * @return the document id -> query-count map (live reference)
	 */
	public HashMap<String, Integer> getInverseDocumentWeight() {
		return inverseDocumentWeight;
	}

	/**
	 * @return the query -> weight map (live reference)
	 */
	public HashMap<String, Float> getQueryWeight() {
		return QueryWeight;
	}

	/**
	 * @return the mean of all query weights, or 0 when no query weights have
	 *         been computed (previously this returned {@code NaN} from a
	 *         0/0 division)
	 */
	public double getMeanQueryWeight() {
		if (QueryWeight.isEmpty()) {
			return 0; // guard against 0/0 -> NaN
		}
		double sum = 0;
		for (Entry<String, Float> e : QueryWeight.entrySet()) {
			sum += e.getValue();
		}
		return sum / QueryWeight.size();
	}

	/**
	 * @return the set of all distinct document ids seen so far (live reference)
	 */
	public Set<String> getUniqueIDset() {
		return this.uniqueid;
	}

	/**
	 * @return the query -> document-id-set matrix (live reference)
	 */
	public HashMap<String, Set<String>> getMatrix() {
		return this.m;
	}

}
