/**
 * 
 */
package edu.umd.clip.lm.playground;

import java.util.*;

import edu.berkeley.nlp.util.*;
import edu.umd.clip.jobs.*;
import edu.umd.clip.lm.model.training.*;
import edu.umd.clip.lm.util.*;
/**
 * @author Denis Filimonov <den@cs.umd.edu>
 *
 */
public class ContextVariableEntropy {

	/**
	 * Command-line options, parsed reflectively by {@code OptionParser} in main().
	 */
	public static class Options {
        @Option(name = "-jobs", usage = "number of concurrent jobs (default: 1)")
        public int jobs = 1;
        @Option(name = "-words", usage = "the word vocabulary size (default: 10000)")
        public int nrWords = 10000;
        @Option(name = "-events", usage = "the number of random events (corpus size) (default: 100000)")
        public int nrEvents = 100000;
        // NOTE(review): declared and parseable, but main() never reads this —
        // the number of experiments is derived from the -sizes list instead.
        @Option(name = "-exp", usage = "the number of experiments (default: 10)")
        public int nrExperiments = 10;
        @Option(name = "-sizes", usage = "the sizes, comma separated (default: 10,100,1000,10000)")
        public String sizes = "10,100,1000,10000";
	}
	
	// Cumulative Zipf distribution table, filled once by estimateZipfProbabilities();
	// index i holds the accumulated probability mass of ranks 1..i+1.
	static double zipfAccumulatedProbabilities[];
	
	/**
	 * One experiment: generates random (x, word) events, measures the word
	 * entropy, then splits the x contexts into two clusters with the exchange
	 * algorithm and measures the count-weighted entropy of the split.
	 *
	 * Refactor note: the distribution-entropy loop previously appeared three
	 * times and the count-merging loop twice; both are now shared helpers
	 * (identical arithmetic, so results are unchanged).
	 */
	private static class ExperimentRun {
		// Per-context word-count maps; an entry stays null until an event
		// with that x value is generated.
		final Long2IntMap x2wordMap[];
		final int nrX;       // number of context values (x)
		final int nrWords;   // word vocabulary size
		final int nrEvents;  // number of random events to generate
		final Random rnd;
		double entropy;      // H(word) over the generated events
		double newEntropy;   // weighted average entropy after the 2-way split

		public ExperimentRun(int xSize, int nrWords, int nrEvents) {
			this.nrX = xSize;
			this.nrWords = nrWords;
			this.nrEvents = nrEvents;
			x2wordMap = new Long2IntMap[nrX];
			rnd = new Random();
		}

		public void doExperiment() {
			generateEvents();
			doExchange();
		}

		/** Draws nrEvents (x, word) pairs uniformly and computes H(word). */
		public void generateEvents() {
			Long2IntMap wordDist = new Long2IntMap(nrWords);
			
			for(int i=0; i<nrEvents; ++i) {
				// X's are distributed uniformly
				int x = rnd.nextInt(nrX);
				// so are words
				int word = rnd.nextInt(nrWords);
				// this is Zipf's distribution
				//int word = sampleWord(nrWords, rnd);
				
				Long2IntMap map = x2wordMap[x];
				if (map == null) {
					// expected events per context, +1 so capacity is never zero
					map = new Long2IntMap(nrEvents / nrX + 1);
					x2wordMap[x] = map;
				}
				map.addAndGet(word, 1);

				wordDist.addAndGet(word, 1);
			}

			this.entropy = computeEntropy(wordDist, nrEvents);
		}

		/**
		 * Splits the contexts into two clusters via the exchange algorithm and
		 * computes the count-weighted average entropy of the two halves.
		 */
		public void doExchange() {
			HashMap<Integer, Long2IntMap> eventsByX = new HashMap<Integer, Long2IntMap>(nrX);
			for(int i=0; i<nrX; ++i) {
				Long2IntMap map = x2wordMap[i];
				if (map != null) {
					eventsByX.put(i, map);
				}
			}
			
			Pair<HashSet<Integer>, HashSet<Integer>> split = ExchangeAlgo.getExchangeWordSplit(eventsByX.keySet(), eventsByX, null);
			
			Long2IntMap dist1 = new Long2IntMap(nrWords);
			long dist1Count = mergeCounts(split.getFirst(), dist1);
			double dist1Entropy = computeEntropy(dist1, dist1Count);

			Long2IntMap dist2 = new Long2IntMap(nrWords);
			long dist2Count = mergeCounts(split.getSecond(), dist2);
			double dist2Entropy = computeEntropy(dist2, dist2Count);

			newEntropy = (dist1Count * dist1Entropy + dist2Count * dist2Entropy) / (dist1Count + dist2Count);
		}

		/**
		 * Sums the word counts of all the given contexts into dist.
		 *
		 * @param xs   context ids (indices into x2wordMap; must be non-null entries)
		 * @param dist accumulator the merged counts are added into
		 * @return the total number of events merged
		 */
		private long mergeCounts(HashSet<Integer> xs, Long2IntMap dist) {
			long total = 0;
			for(int x : xs) {
				Long2IntMap map = x2wordMap[x];
				for(Long2IntMap.Iterator it = map.iterator(); it.hasNext(); ) {
					Long2IntMap.Entry e = it.next();
					dist.addAndGet(e.getKey(), e.getValue());
					total += e.getValue();
				}
			}
			return total;
		}

		/**
		 * Entropy (in bits) of the count distribution normalized by totalCount.
		 * An empty distribution yields 0, matching the original inline loops.
		 */
		private static double computeEntropy(Long2IntMap dist, long totalCount) {
			double h = 0;
			double revCount = 1.0 / totalCount;
			for(Long2IntMap.Iterator it = dist.iterator(); it.hasNext(); ) {
				int count = it.next().getValue();
				double p = count * revCount;
				h -= p * ProbMath.log2(p);
			}
			return h;
		}
	}
	
	// take one sample from Zipf's distribution  
	/**
	 * Draws one sample from the Zipf distribution (truncated to nrWords ranks)
	 * whose cumulative probabilities are stored in zipfAccumulatedProbabilities.
	 * estimateZipfProbabilities() must have been called with at least nrWords.
	 *
	 * @param nrWords number of admissible ranks (prefix of the CDF table)
	 * @param rnd     randomness source
	 * @return the sampled word index (0-based rank)
	 */
	private static int sampleWord(int nrWords, Random rnd) {
		// Total mass of the admissible prefix; < 1 when nrWords truncates the table.
		double maxProb = zipfAccumulatedProbabilities[nrWords-1];
		// Inverse-transform sampling: the draw must be scaled INTO [0, maxProb).
		// BUG FIX: the original divided by maxProb, mapping the draw to
		// [0, 1/maxProb) — outside the CDF's range — which skewed samples
		// toward the highest ranks and relied on the clamp below.
		double theProb = rnd.nextDouble() * maxProb;
		int pos = Arrays.binarySearch(zipfAccumulatedProbabilities, 0, nrWords, theProb);
		if (pos < 0) {
			// binarySearch returns -(insertionPoint) - 1 when the key is absent;
			// the insertion point is the first CDF entry >= theProb.
			pos = -pos - 1;
			if (pos == nrWords) {
				// hypothetically possible due to fp rounding
				pos = nrWords - 1;
			}
		}
		return pos;
	}
	
	
	/**
	 * Builds the cumulative distribution of a Zipf law over nrWords ranks
	 * (weight proportional to 1/rank) and publishes it in
	 * zipfAccumulatedProbabilities; the last entry is 1 up to fp rounding.
	 *
	 * @param nrWords number of ranks in the distribution
	 */
	private static void estimateZipfProbabilities(int nrWords) {
		double cdf[] = new double[nrWords];
		double norm = 0;

		// First pass: unnormalized Zipf weights 1/1, 1/2, ..., 1/nrWords.
		for(int rank = 1; rank <= nrWords; ++rank) {
			double weight = 1.0 / rank;
			cdf[rank - 1] = weight;
			norm += weight;
		}

		// Second pass: convert in place into the normalized running sum.
		double running = 0;
		for(int i = 0; i < nrWords; ++i) {
			running += cdf[i] / norm;
			cdf[i] = running;
		}

		zipfAccumulatedProbabilities = cdf;
	}
	/**
	 * @param args
	 */
	/**
	 * Entry point: runs one ExperimentRun per requested context size, in
	 * parallel through the JobManager, then prints for each size the baseline
	 * word entropy H and the entropy reduction dH from the 2-way exchange split.
	 *
	 * @param args command-line arguments, parsed into {@link Options}
	 */
	public static void main(String[] args) {
		OptionParser optParser = new OptionParser(Options.class);
		final Options opts = (Options) optParser.parse(args, true);

		// Start the worker pool; daemon so the JVM can exit when main is done.
		JobManager.initialize(opts.jobs);
		JobManager manager = JobManager.getInstance();
		Thread managerThread = new Thread(manager, "Job Manager");
		managerThread.setDaemon(true);
		managerThread.start();

		estimateZipfProbabilities(opts.nrWords);

		// One experiment per comma-separated size in -sizes.
		final String[] sizeStrings = opts.sizes.split(",");
		final int runCount = sizeStrings.length;
		final double baseEntropies[] = new double[runCount];
		final double splitEntropies[] = new double[runCount];

		JobGroup group = manager.createJobGroup("group");

		for(int run = 0; run < runCount; ++run) {
			final int idx = run;
			Job job = new Job(new Runnable() {

				@Override
				public void run() {
					int contextSize = Integer.parseInt(sizeStrings[idx]);
					ExperimentRun experiment = new ExperimentRun(contextSize, opts.nrWords, opts.nrEvents);
					experiment.doExperiment();
					baseEntropies[idx] = experiment.entropy;
					splitEntropies[idx] = experiment.newEntropy;
				}

			}, "");
			manager.addJob(group, job);
		}
		// Block until every experiment has finished.
		group.join();

		for(int run = 0; run < runCount; ++run) {
			System.out.printf("[%s] H = %g, dH = %g\n", sizeStrings[run], baseEntropies[run], baseEntropies[run] - splitEntropies[run]);
		}
	}

}
