package org.terrier.matching.smart;

import java.util.ArrayList;
import java.util.Map;

import org.terrier.matching.MatchingQueryTerms;
import org.terrier.matching.models.WeightingModel;
import org.terrier.structures.EntryStatistics;
import org.terrier.structures.Index;
import org.terrier.structures.IndexUtil;
import org.terrier.structures.Lexicon;
import org.terrier.structures.LexiconEntry;
import org.terrier.structures.collections.MapEntry;
import org.terrier.utility.ApplicationSetup;

/**
 * A {@link SmartMatching} variant that resolves query terms against the
 * lexicon of a separate, <em>external</em> index rather than the local one.
 * The external index location is read from the <tt>external.index.path</tt>
 * and <tt>external.index.prefix</tt> properties.
 */
public class ExtendedSmartMatching extends SmartMatching{
	/** The external index opened from the configured path/prefix properties. */
	protected Index extIndex;
	
	/** Lexicon of the external index; query terms are looked up here. */
	protected Lexicon extLexicon;
	
	/**
	 * Constructs the matching strategy and opens the external index.
	 * @param index the local index passed to the superclass
	 * @throws IllegalStateException if the external index cannot be opened
	 */
	public ExtendedSmartMatching(Index index){
		super(index);
		final String extPath = ApplicationSetup.getProperty("external.index.path", "must be given");
		final String extPrefix = ApplicationSetup.getProperty("external.index.prefix", "data");
		extIndex = Index.createIndex(extPath, extPrefix);
		// Index.createIndex() returns null when the index cannot be loaded;
		// fail fast with an informative message rather than letting the
		// getLexicon() call below throw an opaque NullPointerException.
		if (extIndex == null) {
			throw new IllegalStateException("Could not open external index"
				+ " (external.index.path=" + extPath
				+ ", external.index.prefix=" + extPrefix + ")");
		}
		extLexicon = extIndex.getLexicon();
	}
	
	/**
	 * Prepares the query for matching: filters out terms absent from the
	 * external lexicon (or with too-low IDF, or without weighting models),
	 * then instantiates and configures the weighting models for each
	 * surviving term.
	 * @param queryTerms the query terms to initialise matching for
	 */
	protected void initialise(MatchingQueryTerms queryTerms) 
	{
		updateStartTime(System.currentTimeMillis());
		
		RETRIEVED_SET_SIZE   = Integer.parseInt(ApplicationSetup.getProperty("matching.retrieved_set_size", "1000"));
		IGNORE_LOW_IDF_TERMS = Boolean.parseBoolean(ApplicationSetup.getProperty("ignore.low.idf.terms","true"));
		MATCH_EMPTY_QUERY    = Boolean.parseBoolean(ApplicationSetup.getProperty("match.empty.query","false"));
		
		this.numberOfRetrievedDocuments = 0;

		// Purge query terms not present in the external lexicon, keeping the
		// lexicon entry for each term that survives the filters below.
		String[] queryTermStrings = queryTerms.getTerms();
		queryTermsToMatchList = new ArrayList<Map.Entry<String,LexiconEntry>>(queryTermStrings.length);
		
		for (String queryTerm: queryTermStrings) {
			LexiconEntry t = extLexicon.getLexiconEntry(queryTerm);
			if (t == null) {
				logger.warn("Term Not Found: " + queryTerm);
				continue;
			}
			// Drop terms whose document frequency exceeds the collection size,
			// i.e. terms with very low IDF.
			// NOTE(review): this compares the LOCAL index's document count
			// against a frequency taken from the EXTERNAL lexicon — confirm
			// this cross-index comparison is intended.
			if (IGNORE_LOW_IDF_TERMS && collectionStatistics.getNumberOfDocuments() < t.getFrequency()) {
				logger.warn("query term " + queryTerm + " has low idf - ignored from scoring.");
				continue;
			}
			// A term with no weighting models cannot contribute to scoring.
			WeightingModel[] termWeightingModels = queryTerms.getTermWeightingModels(queryTerm);
			if (termWeightingModels.length == 0) {
				logger.warn("No weighting models for term " + queryTerm +", skipping scoring");
				continue;
			}
			queryTermsToMatchList.add(new MapEntry<String, LexiconEntry>(queryTerm, t));
		}

		int queryLength = queryTermsToMatchList.size();
		
		wm = new WeightingModel[queryLength][];
		this.ACCEPT_NEGATIVE_SCORE = false;
		for (int i = 0; i < queryLength; i++) 
		{
			Map.Entry<String, LexiconEntry> termEntry    = queryTermsToMatchList.get(i);
			String 							queryTerm    = termEntry.getKey();
			LexiconEntry 					lexiconEntry = termEntry.getValue();
			// Prefer entry statistics attached to the query (they may have
			// come from a remote source); fall back to the lexicon entry.
			EntryStatistics entryStats = queryTerms.getStatistics(queryTerm);
			if (entryStats == null)
				entryStats = lexiconEntry;
			queryTerms.setTermProperty(queryTerm, lexiconEntry);

			// Initialise the weighting models for this term. Hoist the
			// model lookup out of the inner loop — the original re-fetched
			// the array on every iteration.
			final WeightingModel[] termModels = queryTerms.getTermWeightingModels(queryTerm);
			final int numWM = termModels.length;
			wm[i] = new WeightingModel[numWM];
			for (int j = 0; j < numWM; j++) {				
				wm[i][j] = (WeightingModel) termModels[j].clone();
				// NOTE(review): background statistics and IndexUtil.configure
				// both use the LOCAL index, while entryStats may originate
				// from the external lexicon — confirm this mix is intended.
				wm[i][j].setBackgroundStatistics(collectionStatistics);
				wm[i][j].setEntryStatistics(entryStats);
				wm[i][j].setRequest(queryTerms.getRequest());
				wm[i][j].setKeyFrequency(queryTerms.getTermWeight(queryTerm));
				IndexUtil.configure(index, wm[i][j]);
				if (!this.ACCEPT_NEGATIVE_SCORE && wm[i][j].ACCEPT_NEGATIVE_SCORE)
					this.ACCEPT_NEGATIVE_SCORE = true;
			}
		}
	}
}
