package util.ranker;



import java.io.IOException;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Map.Entry;

import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.queryParser.ParseException;

import util.hashing.Sorting;
import util.io.FileInput;
import util.ir.LMSearcher;
import util.ir.LMSearcherMix;
import util.models.LanguageModel;
import util.models.ObjectMixModel;

/**
 * Ranks candidate tags against a weighted query of seed tags using a mixture
 * of a foreground and a background language model (bigram + unigram counts).
 *
 * <p>The score of a candidate {@code s} aggregates, over every query tag
 * {@code t}, a KL-style term {@code p_fore * log(p_fore / p_back)} built from
 * smoothed joint/unigram probabilities, plus optional log-weights for the
 * candidate's own query frequency and its foreground prior.
 *
 * <p>NOTE(review): this class is not thread-safe — {@code query_total} and the
 * model fields are mutable instance state shared across method calls.
 */
public class TagRankingMixInMemory {

	/** Intended cut-off for top suggestions; currently unused in this class. */
	private static final int TOP_N = 200;

	/** Path to the enriched bigram index used to build both mixture models. */
	public static String bigram_index = "/home/sergio/projects/delicious_index/bigrams_mix/bigrams_enriched.txt";
	/** Path to the enriched trigram index; declared but never read here. */
	public static String trigram_index = "/home/sergio/delicious_index/projects/trigrams_index_mix/tringrams_enriched.txt";

	/** Interpolation weight: lambda * joint + (1 - lambda) * unigram prior. */
	float lambda = 0.9f;

	/**
	 * Seed tags representing the query, mapped to their weights.
	 * Populated via {@link #initSeedTags(Hashtable)}.
	 */
	Hashtable<String, Float> seed_tags = new Hashtable<String, Float>();

	// Collection identifiers for foreground / background models
	// (e.g. "kids", "adults", "kids-teens"); set by the constructor.
	String type_foreground = "/";
	String type_foreground_b = "/";
	String type_background = "/";

	/**
	 * Sum of all query-tag weights, (re)computed by
	 * {@link #getQueryTagsTotal(Hashtable)} and read by the scoring methods.
	 */
	float query_total = 0;

	private LanguageModel lm_foreground = new LanguageModel();

	private LanguageModel lm_background = new LanguageModel();

	// flag modes

	boolean query_weight = true; // weight inside the aggregator for each t
	boolean candidate_query_weight = true; // candidate query weight
	boolean prob_weight = true; // probability of s in the foreground collection

	ObjectMixModel main_model = null;
	ObjectMixModel added_model = null;

	/**
	 * Builds the ranker by loading the mixture models from {@link #bigram_index}.
	 *
	 * @param type_fore   primary foreground collection id
	 * @param type_back   background collection id
	 * @param type_fore_b secondary foreground collection id; if different from
	 *                    {@code type_fore}, its model is merged into the main one
	 * @throws IOException    if the index file cannot be read
	 * @throws ParseException if the index content cannot be parsed
	 */
	public TagRankingMixInMemory(String type_fore, String type_back, String type_fore_b)
			throws IOException, ParseException {

		type_foreground = type_fore;
		type_background = type_back;
		this.type_foreground_b = type_fore_b;

		initLanguageModel();

		lm_foreground = main_model.model_fore;
		lm_background = main_model.model_back;
	}

	/**
	 * Sums the weights of all query tags.
	 *
	 * <p>Bug fix: {@code query_total} is an instance field and was previously
	 * never reset, so calling this (e.g. via
	 * {@link #rankSuggestions(Hashtable)}) more than once on the same instance
	 * kept accumulating and corrupted every subsequent score. It is now reset
	 * before summing; single-call behavior is unchanged.
	 *
	 * @param query tag -&gt; weight map
	 * @return the total weight over all query tags
	 */
	public float getQueryTagsTotal(Hashtable<String, Float> query) {

		query_total = 0f; // reset: field must not accumulate across calls

		Enumeration<String> keys = query.keys();
		while (keys.hasMoreElements()) {
			String key = keys.nextElement();
			query_total = query_total + query.get(key);
		}

		return query_total;
	}

	/**
	 * Loads the main mixture model and, when a distinct secondary foreground
	 * type is configured, merges that secondary model into the main one
	 * (add for the foreground, union for the background).
	 *
	 * @throws CorruptIndexException if the underlying index is corrupt
	 * @throws IOException           if the index file cannot be read
	 * @throws ParseException        if the index content cannot be parsed
	 */
	public void initLanguageModel()
			throws CorruptIndexException, IOException, ParseException {

		main_model = new ObjectMixModel(bigram_index, null, type_foreground);

		if (!type_foreground_b.equals(type_foreground)) {

			added_model = new ObjectMixModel(bigram_index, null, type_foreground_b);

			main_model.model_fore.addLanguageModel(added_model.model_fore);
			main_model.model_back.unionLanguageModel(added_model.model_back);
		}
	}

	/**
	 * Copies the given seed tags (and their weights) into {@link #seed_tags}.
	 *
	 * @param s_tags seed tag -&gt; weight map
	 * @throws CorruptIndexException declared for interface compatibility
	 * @throws IOException           declared for interface compatibility
	 * @throws ParseException        declared for interface compatibility
	 */
	public void initSeedTags(Hashtable<String, Float> s_tags)
			throws CorruptIndexException, IOException, ParseException {

		Enumeration<String> keys = s_tags.keys();
		while (keys.hasMoreElements()) {
			String seed = keys.nextElement();
			seed_tags.put(seed, s_tags.get(seed));
		}
	}

	/**
	 * Scores every candidate tag of the foreground model against the query and
	 * returns the candidates sorted by descending score.
	 *
	 * <p>Per candidate {@code s}: aggregate score over all query tags, plus
	 * (when enabled) the log of s's foreground prior and the log of s's
	 * relative weight inside the query.
	 *
	 * @param query_hash query tag -&gt; weight map
	 * @return iterator over (tag, score) entries, best first
	 */
	public Iterator<Entry<String, Float>> rankSuggestions(Hashtable<String, Float> query_hash) {

		Hashtable<String, Float> scores = new Hashtable<String, Float>();
		Iterator<String> s_enum = lm_foreground.candidates.iterator();

		float total_query = getQueryTagsTotal(query_hash);

		while (s_enum.hasNext()) {

			String s = s_enum.next();

			// Aggregate over all query tags t for this candidate s.
			float score = aggregate_t(s, query_hash, total_query);

			// Optional weight: log of s's probability in the foreground model.
			float freq_s_foreground = lm_foreground.getUnigramFreq(s);
			float prob_s_fore = freq_s_foreground / TOTALS.KIDS_UNIGRAM;

			float prob_s = 0f;
			if (prob_weight) {
				// log(0) yields -Infinity; such scores are trapped in score_tag_mix's
				// infinity guard only for pairwise scores — TODO confirm candidates
				// always have nonzero foreground frequency.
				prob_s = (float) Math.log(prob_s_fore);
			}

			// Optional weight: log of s's own relative weight inside the query.
			float candidate_q_w = 0.0f;
			if (candidate_query_weight && query_hash.containsKey(s)) {
				// Use the freshly computed total rather than re-reading the field
				// (identical value; avoids depending on mutable instance state).
				candidate_q_w = (float) (query_hash.get(s) / total_query);
				candidate_q_w = (float) Math.log(candidate_q_w);
			}

			// All components are in log space, hence added rather than multiplied.
			score = score + candidate_q_w + prob_s;
			scores.put(s, score);
		}

		Iterator<Entry<String, Float>> tuples = Sorting.sortHashNumericValues(
				scores, false);

		return tuples;
	}

	/**
	 * Sums the pairwise mixture score of candidate {@code s} against every
	 * query tag {@code t} that exists in the foreground model (or equals
	 * {@code s} itself).
	 *
	 * @param s           candidate tag
	 * @param query       query tag -&gt; weight map
	 * @param total_query precomputed sum of all query-tag weights
	 * @return the aggregated score for {@code s}
	 */
	private float aggregate_t(String s, Hashtable<String, Float> query, float total_query) {

		float total = 0f;

		Enumeration<String> iter = query.keys();
		while (iter.hasMoreElements()) {

			String t = iter.nextElement();

			// Ignore query tags absent from the foreground model, except the
			// special case where the candidate itself is the query tag.
			if (lm_foreground.unigrams.containsKey(t) || (t.equals(s))) {
				total = total + score_tag_mix(s, t, lambda, query, total_query);
			}
		}

		return total;
	}

	/**
	 * Pairwise score of candidate {@code s} for query tag {@code t}:
	 * a KL-style term {@code p_fore * log(p_fore / p_back)} over the
	 * lambda-smoothed joint bigram probability, plus (when enabled) the log
	 * of t's relative weight in the query.
	 *
	 * <p>Exits the JVM when the score is exactly 0 or non-finite — these are
	 * treated as fatal data/model inconsistencies (debug guards).
	 *
	 * @param s           candidate tag
	 * @param t           query tag
	 * @param lambda      smoothing weight between joint and unigram probability
	 * @param query_hash  query tag -&gt; weight map
	 * @param query_total sum of all query-tag weights
	 * @return the pairwise log-space score
	 */
	private float score_tag_mix(String s, String t, float lambda, Hashtable<String, Float> query_hash, float query_total) {

		float freq_st_foreground = lm_foreground.getBigramFreq(s, t); // check order
		float freq_st_background = lm_background.getBigramFreq(s, t); // check order

		float freq_t_foreground = lm_foreground.getUnigramFreq(t);
		float freq_t_background = lm_background.getUnigramFreq(t);

		float freq_s_foreground = lm_foreground.getUnigramFreq(s);
		float freq_s_background = lm_background.getUnigramFreq(s);

		// Conditional probabilities p(t|s) (computed but unused in the current
		// mix-model score; kept for the commented smoothing variant below).
		float prob_t_given_s_fore = freq_st_foreground / freq_s_foreground;
		float prob_t_given_s_back = freq_st_background / freq_s_background;

		// Joint probabilities p(t, s)
		float prob_t_and_s_fore = freq_st_foreground / TOTALS.KIDS_BIGRAM;
		float prob_t_and_s_back = freq_st_background / TOTALS.ADULTS_BIGRAM;

		// Unigram probabilities
		float prob_t_fore = freq_t_foreground / TOTALS.KIDS_UNIGRAM;
		float prob_t_back = freq_t_background / TOTALS.ADULTS_UNIGRAM;

		float prob_s_fore = freq_s_foreground / TOTALS.KIDS_UNIGRAM;
		float prob_s_back = freq_s_background / TOTALS.ADULTS_UNIGRAM;

		// Special case: treat the unseen pair (s, s) as the unigram (s).
		if (prob_t_and_s_fore == 0 && s.equals(t)) {
			prob_t_and_s_fore = prob_t_fore;
			prob_t_and_s_back = prob_t_back;
		}

		// Jelinek-Mercer-style smoothing of the joint with the unigram prior.
		float score_fore = lambda * prob_t_and_s_fore + (1 - lambda)
				* prob_t_fore;
		float score_back = lambda * prob_t_and_s_back + (1 - lambda)
				* prob_t_back;

		// Optional weight: log of t's relative weight inside the query.
		float query_weight_t = 0.0f;
		if (query_weight) {
			query_weight_t = query_hash.get(t) / query_total;
			query_weight_t = (float) Math.log(query_weight_t);
		}

		// KL-style mixture score.
		float score = (float) (score_fore * Math.log(score_fore / score_back));

		score = score + query_weight_t;

		if (score == 0.0f) {
			// Bug fix: the diagnostic message used to come AFTER System.exit(0)
			// and was therefore never printed (and the JVM reported success on
			// an error condition). Print first, then exit with a failure code.
			System.out.println("Score is 0" + s + "\t" + t + "\t" + score_back);
			System.exit(1);
		}

		boolean debug = false;
		if ((s.equals("inspiration") || s.equals("games")) && debug) {

			System.out.println("s:" + s + "\tt: " + t + "  lamda:" + lambda);
			System.out.println("freq_st_fore:" + freq_st_foreground
					+ "\tfreq_st_back:" + freq_st_background);
			System.out.println("freq_t_fore:" + freq_t_foreground
					+ "\tfreq_t_back:" + freq_t_background);
			System.out.println("prob_t_given_s:" + prob_t_given_s_fore
					+ "\tprob_t_and_s_fore:" + prob_t_and_s_fore);
			System.out.println("query_weight_t:" + query_weight_t
					+ "\tfreq_t_in_query" + query_hash.get(t));
			System.out.println("score_fore" + score_fore + "\tscore_back:"
					+ score_back);
		}

		if (Float.isInfinite(score) || Float.isNaN(score)) {

			System.out.println("Is infinite" + "\t" + query_weight_t + "\t"
					+ score_fore + "\t" + score_back);
			System.exit(1);
		}

		return score;
	}

	/**
	 * Smoothed conditional probability, normalized by the prior:
	 * {@code (lambda * cond + (1 - lambda) * prior) / prior}.
	 *
	 * @param cond  conditional probability p(t|s)
	 * @param prior unigram prior p(t)
	 * @return the smoothed, prior-normalized probability
	 */
	public float conditionalProbSmoothing(float cond, float prior) {

		float prob = lambda * cond + (1f - lambda) * prior;
		prob = prob / prior;
		return prob;
	}

	/**
	 * Releases the in-memory model tables and drops the model references so
	 * they can be garbage-collected. The instance is unusable afterwards.
	 */
	public void clean() {

		lm_foreground.trigrams.clear();
		lm_foreground.bigrams.clear();
		lm_foreground.unigrams.clear();

		lm_background.trigrams.clear();
		lm_background.bigrams.clear();
		lm_background.unigrams.clear();

		lm_background = null;
		lm_foreground = null;
	}

	/**
	 * Demo driver: loads a query from a fixed file, ranks suggestions with the
	 * kids/adults mixture, and prints the top 500 results.
	 */
	public static void main(String args[]) throws IOException, ParseException {

		String path_query = "/home/sergio/data/results/single_query/single_query.txt";

		Hashtable<String, Float> query = TagRankingMix.buildQuery(path_query);

		TagRankingMixInMemory ranker = new TagRankingMixInMemory("kids", "adults", "kids-teens");

		Iterator<Entry<String, Float>> scores = ranker.rankSuggestions(query);

		int n = 500;
		int j = 0;
		while (scores.hasNext() && j < n) {

			j++;
			Entry<String, Float> entry = scores.next();
			System.out.println(j + "\t" + entry.getKey() + "\t"
					+ entry.getValue());
		}
	}

}

