package org.terrier.querying.termselector;

import java.io.IOException;

import java.util.Arrays;

import gnu.trove.THashMap;
import gnu.trove.THashSet;
import gnu.trove.TIntHashSet;
import gnu.trove.TIntIntHashMap;
import gnu.trove.TIntObjectHashMap;

import org.apache.log4j.Logger;

import org.terrier.matching.MatchingQueryTerms;
import org.terrier.matching.ResultSet;
import org.terrier.matching.models.WeightingModel;
import org.terrier.structures.DirectIndex;
import org.terrier.structures.ExpansionTerm;
import org.terrier.structures.Index;
import org.terrier.structures.InvertedIndex;
import org.terrier.structures.Lexicon;
import org.terrier.structures.LexiconEntry;
import org.terrier.utility.ApplicationSetup;

/**
 * This class defines the contract for term selectors.
 * A term selector selects the most important terms from a set of (feedback) documents
 * using a given query expansion model, and merges the selected terms with the original
 * query according to its underlying query merging criteria.
 * @author Ben He (benhe@gucas.ac.cn)
 *
 */
public abstract class TermSelector {
	/** The logger used. Bound to this class (rather than the root logger)
	 * so that output can be filtered per-package. */
	private static final Logger logger = Logger.getLogger(TermSelector.class);
	/**
	 * The number of (pseudo) feedback documents.
	 * NOTE(review): parsed as an integer but stored in a double field; the
	 * field type is kept for backward compatibility with subclasses.
	 */
	protected double EXPANSION_DOCUMENTS = 
		Integer.parseInt(ApplicationSetup.getProperty("expansion.documents", "3"));
	/** The number of the most weighted terms from the pseudo relevance set 
	 * to be added to the original query. There can be overlap between the 
	 * original query terms and the added terms from the pseudo relevance set.
	 * NOTE(review): parsed as an integer but stored in a double field; the
	 * field type is kept for backward compatibility with subclasses. */
	protected double EXPANSION_TERMS = 
		Integer.parseInt(ApplicationSetup.getProperty("expansion.terms", "10"));
	
	/** Rocchio's alpha for query expansion. */
	public double ROCCHIO_ALPHA;
	
	/** Rocchio's beta for query expansion. */
	public double ROCCHIO_BETA;
	
	/** Mapping from termid to candidate expansion terms, populated by the
	 * getTerms(...) methods; null until one of them has run. */
	protected TIntObjectHashMap<ExpansionTerm> termMap;
	
	/** Ids of terms in the original query; null unless set via
	 * setOriginalQueryTermids/setOriginalQueryTerms. Enables conservative QE. */
	protected TIntHashSet originalQueryTermidSet;
	/** The index structure. */
	protected Index index;
	/** Pointer to the lexicon of the index. */
	protected Lexicon lexicon;
	/** Pointer to the direct (forward) index. */
	protected DirectIndex di;
	/** Pointer to the inverted index. */
	protected InvertedIndex ii;
	/** The number of tokens in the feedback documents. */
	protected int feedbackSetLength;
	/**  
	 * Get the number of tokens in the feedback documents.
	 * @return The number of tokens in the feedback documents.
	 * */
	public int getFeedbackSetLength() {
		return feedbackSetLength;
	}
	/**
	 * Set the number of tokens in the feedback documents.
	 * @param feedbackSetLength The number of tokens in the feedback documents.
	 */
	public void setFeedbackSetLength(int feedbackSetLength) {
		this.feedbackSetLength = feedbackSetLength;
	}
	/** The number of feedback documents. */
	protected int feedbackSetSize;
	/** The ranked list of returned documents. */
	protected ResultSet resultSet;
	/** A hashmap for meta information (only written here via setMetaInfo;
	 * presumably read by subclasses — confirm against implementations). */
	protected THashMap<String, String> metaMap = new THashMap<String, String>();
	/**
	 * The constructor.
	 * @param index The index structure.
	 */
	public TermSelector(Index index) {
		this();
		this.setIndex(index);
	}
	/**
	 * Default constructor. Loads the Rocchio alpha/beta parameters from the
	 * application properties.
	 */
	public TermSelector(){
		/* Accept both rocchio.beta and rocchio_beta as property name. rocchio_beta will deprecated in due course. */
		ROCCHIO_BETA = Double.parseDouble(ApplicationSetup.getProperty("rocchio.beta", ApplicationSetup.getProperty("rocchio_beta", "0.4d")));
		ROCCHIO_ALPHA = Double.parseDouble(ApplicationSetup.getProperty("rocchio.alpha", ApplicationSetup.getProperty("rocchio_alpha", "1d")));
	}
	/**
	 * Set the index structure, and cache its lexicon, direct and inverted
	 * index structures in the corresponding fields.
	 * @param index The index.
	 */
	public void setIndex(Index index){
		this.index = index;
		lexicon = index.getLexicon();
		di = index.getDirectIndex();
		ii = index.getInvertedIndex();
	}
	/**
	 * Set the result set.
	 * @param results The list of retrieved documents
	 */
	public void setResultSet(ResultSet results){
		this.resultSet = results;
	}
	/**
	 * Get the candidate terms with highest expansion weights.
	 * If numberOfExpandedTerms is 0 and the original query termids have been
	 * set, conservative query expansion is applied: only the original query
	 * terms are returned (reweighed), no new terms are added.
	 * @param numberOfExpandedTerms The number of candidate terms to return.
	 * @return Candidate terms with highest expansion weights, sorted by
	 *         ExpansionTerm's natural ordering; empty if no terms are known.
	 */
	public ExpansionTerm[] getMostWeightedTerms(int numberOfExpandedTerms){
		if (termMap==null)
			return new ExpansionTerm[0];
		// Is conservative query expansion? Conservative QE only reweighs the original
		// query terms without adding expansion terms to the query.
		final boolean conservativeQE = (numberOfExpandedTerms==0 && this.originalQueryTermidSet != null);
		final ExpansionTerm[] candidates = getSortedCandidateTerms();
		THashSet<ExpansionTerm> tSet = new THashSet<ExpansionTerm>();
		if (!conservativeQE){
			// take the top-n candidates that have a strictly positive weight
			final int n = Math.min(numberOfExpandedTerms, candidates.length);
			for (int i=0; i<n; i++)
				if (candidates[i].getWeightExpansion()>0d)
					tSet.add(candidates[i]);
		}else{
			// keep only the (reweighed) original query terms
			for (int i=0; i<candidates.length; i++)
				if (this.originalQueryTermidSet.contains(candidates[i].getTermID())){
					tSet.add(candidates[i]);
					if (tSet.size() == this.originalQueryTermidSet.size())
						break; // all original query terms found
				}
		}
		ExpansionTerm[] toReturn = (ExpansionTerm[])tSet.toArray(new ExpansionTerm[tSet.size()]);
		Arrays.sort(toReturn);
		return toReturn;
	}
	/** Copy the candidate terms out of termMap and sort them by
	 * ExpansionTerm's natural ordering (presumably by descending expansion
	 * weight — see ExpansionTerm.compareTo). Assumes termMap != null. */
	private ExpansionTerm[] getSortedCandidateTerms(){
		final Object[] obj = termMap.getValues();
		final ExpansionTerm[] terms = new ExpansionTerm[obj.length];
		for (int i=0; i<obj.length; i++)
			terms[i] = (ExpansionTerm)obj[i];
		Arrays.sort(terms);
		return terms;
	}
	/**
	 * An abstract method for merging the expansion terms with the given query.
	 * @param QEModel The query expansion model.
	 * @param query The given query.
	 * @param numberOfExpansionTerms The number of expansion terms.
	 */
	abstract public void mergeWithQuery(WeightingModel QEModel, MatchingQueryTerms query, int numberOfExpansionTerms);
	/**
	 * Get the candidate terms with highest expansion weights in a hashmap.
	 * Applies the same conservative-QE rule as getMostWeightedTerms(int).
	 * @param numberOfExpandedTerms The number of candidate terms to return.
	 * @return Hashmap from term id to the expansion term; empty if no terms are known.
	 */
	public TIntObjectHashMap<ExpansionTerm> getMostWeightedTermsInHashMap(int numberOfExpandedTerms){
		TIntObjectHashMap<ExpansionTerm> tMap = new TIntObjectHashMap<ExpansionTerm>();
		if (termMap==null)
			return tMap;
		// Is conservative query expansion? Conservative QE only reweighs the original
		// query terms without adding expansion terms to the query.
		final boolean conservativeQE = (numberOfExpandedTerms==0 && this.originalQueryTermidSet != null);
		final ExpansionTerm[] candidates = getSortedCandidateTerms();
		if (!conservativeQE){
			final int n = Math.min(numberOfExpandedTerms, candidates.length);
			for (int i=0; i<n; i++)
				if (candidates[i].getWeightExpansion()>0d)
					tMap.put(candidates[i].getTermID(), candidates[i]);
		}else{
			for (int i=0; i<candidates.length; i++)
				if (this.originalQueryTermidSet.contains(candidates[i].getTermID())){
					tMap.put(candidates[i].getTermID(), candidates[i]);
					if (tMap.size() == this.originalQueryTermidSet.size())
						break; // all original query terms found
				}
		}
		return tMap;
	}
	/**
	 * Get the number of unique terms in the feedback document set.
	 * @return The number of unique terms in the feedback document set,
	 *         or 0 if no terms have been extracted yet.
	 */
	public int getNumberOfUniqueTerms(){
		return (termMap==null) ? 0 : termMap.size();
	}
	/**
	 * Set the meta information.
	 * @param property The name of the meta property.
	 * @param value The value of the property.
	 */
	public void setMetaInfo(String property, String value){
		metaMap.put(property, value);
	}
	/**
	 * Extract terms in the given documents from the direct index, resetting
	 * and repopulating termMap, feedbackSetLength and feedbackSetSize.
	 * @param docids The ids of the given documents.
	 */
	protected void getTerms(int[] docids){
		this.feedbackSetLength = 0;
		this.feedbackSetSize = 0;
		termMap = new TIntObjectHashMap<ExpansionTerm>();
		try{
			for (int docid : docids) {
				// terms[0] holds termids, terms[1] the within-document frequencies
				int[][] terms = di.getTerms(docid);
				if (terms == null)
					logger.warn("document "+"("+docid+") not found");
				else{
					this.feedbackSetSize++;
					feedbackSetLength += terms[0].length;
					for (int j = 0; j < terms[0].length; j++)
						this.insertTerm(terms[0][j], (double)terms[1][j]);
				}
			}
		}catch(IOException ioe){
			// NOTE(review): terminating the JVM from library code is drastic;
			// kept for behavioural compatibility with the rest of this class.
			ioe.printStackTrace();
			System.exit(1);
		}
	}
	/**
	 * Extract the terms of a single document from the direct index.
	 * @param docid The id of the document.
	 * @return Mapping from termid to within-document frequency; empty if the
	 *         document is not found in the direct index.
	 */
	protected TIntIntHashMap extractTerms(int docid){
		TIntIntHashMap termidFreqMap = new TIntIntHashMap();
		try{
			int[][] terms = di.getTerms(docid);
			if (terms == null)
				logger.warn("document "+"("+docid+") not found");
			else{
				for (int j = 0; j < terms[0].length; j++)
					termidFreqMap.put(terms[0][j], terms[1][j]);
			}
		}catch(IOException ioe){
			// NOTE(review): terminating the JVM from library code is drastic;
			// kept for behavioural compatibility with the rest of this class.
			ioe.printStackTrace();
			System.exit(1);
		}
		return termidFreqMap;
	}
	
	/**
	 * Populate termMap, feedbackSetLength and feedbackSetSize from
	 * pre-extracted per-document termid-to-frequency maps.
	 * @param termidFreqMaps One termid-to-frequency map per feedback document.
	 */
	protected void getTerms(TIntIntHashMap[] termidFreqMaps){
		this.feedbackSetLength = 0;
		// fix: feedbackSetSize was not reset here (unlike getTerms(int[])),
		// so repeated calls accumulated the document count
		this.feedbackSetSize = 0;
		termMap = new TIntObjectHashMap<ExpansionTerm>();
		for (TIntIntHashMap map : termidFreqMaps){
			if (map.size()!=0)
				this.feedbackSetSize++;
			for (int termid : map.keys()){
				final int freq = map.get(termid);
				if (freq>0){
					feedbackSetLength += freq;
					this.insertTerm(termid, (double)freq);
				}
			}
		}
	}
	
	/**
 	* Add a term in the X top-retrieved documents as a candidate of the 
	* expanded terms.
 	* @param termID int the integer identifier of a term
 	* @param withinDocumentFrequency double the within document 
 	*		frequency of a term
 	*/
	protected void insertTerm(int termID, double withinDocumentFrequency) {
		final ExpansionTerm et = termMap.get(termID);
		if (et == null)
			termMap.put(termID, new ExpansionTerm(termID, withinDocumentFrequency));
		else
			et.insertRecord(withinDocumentFrequency);
	}
	
	/**
	 * Create the TermSelector named by the <tt>term.selector.name</tt>
	 * property (default RocchioTermSelector), bound to the given index.
	 * @param index The index structure.
	 * @return The instantiated term selector.
	 */
	public static TermSelector getDefaultTermSelector(Index index){
		// delegate to getTermSelector rather than duplicating its logic
		return getTermSelector(ApplicationSetup.getProperty("term.selector.name", 
				"org.terrier.querying.termselector.RocchioTermSelector"), index);
	}
	
	/**
	 * Create a TermSelector by class name, bound to the given index.
	 * @param name The class name; if unqualified, it is resolved within
	 *        the org.terrier.querying.termselector package.
	 * @param index The index structure.
	 * @return The instantiated term selector.
	 * @throws IllegalStateException if the class cannot be instantiated
	 *         (previously this method returned via an opaque
	 *         NullPointerException at selector.setIndex).
	 */
	public static TermSelector getTermSelector(String name, Index index){
		final String prefix = "org.terrier.querying.termselector.";
		if (name.indexOf('.')<0)
			name = prefix.concat(name);
		TermSelector selector = null;
		try{
			selector = (TermSelector)Class.forName(name).newInstance();
		}catch(Exception e){
			logger.warn("Error while initializing TermSelector "+name, e);
			// fail fast with the real cause instead of a later NullPointerException
			throw new IllegalStateException("Could not instantiate TermSelector "+name, e);
		}
		selector.setIndex(index);
		return selector;
	}
	
	/**
	 * Assign weights to the candidate expansion terms from a result set.
	 * @param resultSet The ranked list of retrieved documents.
	 * @param feedbackSetSize The number of feedback documents to use.
	 * @param QEModel The query expansion model.
	 * @param bgLexicon The background lexicon used for collection statistics.
	 */
	public abstract void assignTermWeights(ResultSet resultSet, int feedbackSetSize, WeightingModel QEModel, Lexicon bgLexicon);
	
	/**
	 * Assign weights to the candidate expansion terms from a set of documents.
	 * @param docids The ids of the feedback documents.
	 * @param QEModel The query expansion model.
	 * @param bgLexicon The background lexicon used for collection statistics.
	 */
	public abstract void assignTermWeights(int[] docids, WeightingModel QEModel, Lexicon bgLexicon);
	
	/**
	 * Assign weights to the candidate expansion terms from pre-extracted
	 * term statistics.
	 * @param termidFreqMap Per-document termid-to-frequency maps.
	 * @param QEModel The query expansion model.
	 * @param bgTermidFreqMap Background termid-to-frequency statistics.
	 * @param bgTermidDocfreqMap Background termid-to-document-frequency statistics.
	 */
	public abstract void assignTermWeights(TIntIntHashMap[] termidFreqMap, WeightingModel QEModel, 
		TIntIntHashMap bgTermidFreqMap, TIntIntHashMap bgTermidDocfreqMap);
	
	/**
	 * Set the ids of the original query terms (enables conservative QE).
	 * @param termids The termids of the original query terms.
	 */
	public void setOriginalQueryTermids(int[] termids){
		originalQueryTermidSet = new TIntHashSet();
		originalQueryTermidSet.addAll(termids);
	}
	
	/**
	 * Get a string description of this term selector's settings.
	 * @return The Rocchio beta value as a string.
	 */
	public String getInfo(){
		return String.valueOf(ROCCHIO_BETA);
	}
	
	/**
	 * Set the original query terms by their string form, resolving each
	 * against the lexicon (enables conservative QE). Terms not found in
	 * the lexicon are silently skipped.
	 * @param termStrings The original query terms.
	 */
	public void setOriginalQueryTerms(String[] termStrings){
		this.originalQueryTermidSet = new TIntHashSet();
		for (String term : termStrings){
			LexiconEntry le = lexicon.getLexiconEntry(term);
			if (le!=null)
				originalQueryTermidSet.add(le.getTermId());
		}
	}
}
