/**
 * 
 */
package edu.umd.clip.lm.playground;

import java.io.FileInputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.util.*;

import edu.berkeley.nlp.util.*;
import edu.umd.clip.jobs.*;
import edu.umd.clip.lm.factors.*;
import edu.umd.clip.lm.factors.Dictionary;
import edu.umd.clip.lm.model.*;
import edu.umd.clip.lm.model.data.*;
import edu.umd.clip.lm.model.training.*;
import edu.umd.clip.lm.util.*;
/**
 * @author Denis Filimonov <den@cs.umd.edu>
 *
 */
public class ContextVariableEntropy {

	/**
	 * Command-line options. Parsed reflectively by {@code OptionParser}, so the
	 * public field names and their {@code @Option} annotations must stay in sync;
	 * do not rename fields without updating the corresponding flag.
	 */
	public static class Options {
        // Path to the experiment XML configuration file (required).
        @Option(name = "-config", required = true, usage = "Output XML config file")
		public String config;
        // Number of worker threads for the JobManager.
        @Option(name = "-jobs", usage = "number of concurrent jobs (default: 1)")
        public int jobs = 1;
        // NOTE(review): nrWords/nrEvents are not referenced in this file —
        // presumably consumed elsewhere or kept for CLI compatibility; verify.
        @Option(name = "-words", usage = "the word vocabulary size (default: 10000)")
        public int nrWords = 10000;
        @Option(name = "-events", usage = "the number of random events (corpus size) (default: 100000)")
        public int nrEvents = 100000;
        // Number of repeated random experiments averaged per split size.
        @Option(name = "-exp", usage = "the number of experiments (default: 10)")
        public int nrExperiments = 10;
        // Split sizes evaluated are 2^0 .. 2^numSplits (see main()).
        @Option(name = "-num-splits", usage = "number of splits (default: 8)")
        public int numSplits = 8;
        // Comma-separated list of binary FLM training-data files.
        @Option(name = "-data", required = true, usage = "Comma-separated list of FLM data files")
		public String data;
	}
	
	/**
	 * Randomly partitions each count distribution into {@code nway} parts by
	 * assigning every individual observation (count unit) to a uniformly random
	 * part. Runs one job per input map on the shared {@link JobManager} and
	 * blocks until all jobs finish.
	 *
	 * @param counts per-context word-count maps to split (not modified)
	 * @param nway   number of random parts per input map; must be &gt;= 1
	 * @return array of length {@code counts.length * nway} where entries
	 *         {@code [i*nway .. i*nway+nway-1]} partition {@code counts[i]}
	 */
	private static Long2IntMap[] splitCounts(final Long2IntMap counts[], final int nway) {
		if (nway < 1) {
			throw new IllegalArgumentException("nway must be >= 1, got " + nway);
		}
		JobManager manager = JobManager.getInstance();
		JobGroup group = manager.createJobGroup("count splitting");

		final Long2IntMap[] newCounts = new Long2IntMap[counts.length * nway];

		for(int i=0; i<counts.length; ++i) {
			final int tag = i;
			Runnable run = new Runnable() {
				@Override
				public void run() {
					Long2IntMap myCounts = counts[tag];
					// Capacity hint for each part. The original formula
					// size()/Math.log(nway) divides by zero when nway == 1
					// (cast of Infinity -> Integer.MAX_VALUE capacity); keep the
					// historical sizing for nway >= 2 and fall back to size()
					// for the degenerate single-part case.
					int capacity = nway > 1
							? (int) (myCounts.size() / Math.log(nway))
							: myCounts.size();
					for(int k=0; k<nway; ++k) {
						newCounts[tag*nway + k] = new Long2IntMap(capacity);
					}

					// Per-job RNG: jobs run on different threads, so sharing one
					// Random instance would contend and/or interleave sequences.
					Random rnd = new Random();
					int wayCounts[] = new int[nway];

					for(Long2IntMap.Iterator it=myCounts.iterator(); it.hasNext(); ) {
						Long2IntMap.Entry e = it.next();
						long word = e.getKey();
						// Scatter each occurrence of this word independently.
						for(int count = e.getValue(); count > 0; --count) {
							wayCounts[rnd.nextInt(nway)]++;
						}
						// Flush and reset the per-word tallies (only non-zero
						// slots were touched, so zeroing them as we go is enough).
						for(int k=0; k<nway; ++k) {
							if (wayCounts[k] > 0) {
								newCounts[tag*nway + k].addAndGet(word, wayCounts[k]);
								wayCounts[k] = 0;
							}
						}
					}
				}

			};
			Job job = new Job(run, "a job");
			manager.addJob(group, job);
		}
		// Block until every per-map splitting job has completed.
		group.join();

		return newCounts;
	}
	
	/**
	 * Computes the unigram entropy of the merged distribution and merges the
	 * given context indices' word distributions into a single one.
	 *
	 * @param counts   per-context word-count maps
	 * @param contexts indices into {@code counts} to merge
	 * @return {@code double[]{entropy_in_bits, total_count}}; entropy is 0 when
	 *         the merged distribution is empty
	 */
	private static double[] mergedEntropy(Long2IntMap[] counts, Iterable<Integer> contexts) {
		Long2IntMap dist = new Long2IntMap();
		long totalCount = 0;
		for(int x : contexts) {
			Long2IntMap map = counts[x];
			for(Long2IntMap.Iterator it = map.iterator(); it.hasNext(); ) {
				Long2IntMap.Entry e = it.next();
				dist.addAndGet(e.getKey(), e.getValue());
				totalCount += e.getValue();
			}
		}

		double entropy = 0;
		double revCount = 1.0 / totalCount;
		for(Long2IntMap.Iterator it = dist.iterator(); it.hasNext(); ) {
			int count = it.next().getValue();
			double p = count * revCount;
			entropy -= p * ProbMath.log2(p);
		}
		return new double[] { entropy, totalCount };
	}

	/**
	 * Runs the exchange algorithm {@code runs} times from random initial
	 * bisections of the contexts and reports the best (lowest) weighted
	 * split entropy found, together with the entropy of the unsplit data.
	 *
	 * @param counts per-context word-count maps ({@code null}/empty entries are skipped)
	 * @param runs   number of random restarts; if {@code runs <= 0} the second
	 *               component of the result stays {@link Double#MAX_VALUE}
	 * @return (entropy of the pooled distribution, best two-way split entropy)
	 */
	public static Pair<Double,Double> doExchange(Long2IntMap[] counts, int runs) {
		// Entropy of all contexts pooled together — the baseline to improve on.
		List<Integer> allContexts = new ArrayList<Integer>(counts.length);
		for(int i=0; i<counts.length; ++i) {
			allContexts.add(i);
		}
		double initialEntropy = mergedEntropy(counts, allContexts)[0];

		double newEntropy = Double.MAX_VALUE;
		Random rnd = new Random();
		for(; runs > 0; --runs) {
			HashMap<Integer, Long2IntMap> eventsByX = new HashMap<Integer, Long2IntMap>(counts.length);

			// BUGFIX: the original sized this set with eventsByX.size()/2, which
			// is always 0 here because eventsByX has not been filled yet.
			HashSet<Integer> randomSplit = new HashSet<Integer>(counts.length / 2 + 1);
			for(int i=0; i<counts.length; ++i) {
				Long2IntMap map = counts[i];
				if (map != null && map.size() > 0) {
					Integer key = i;
					eventsByX.put(key, map);
					// Random initial bisection: each context goes to side 1 w.p. 0.5.
					if (rnd.nextDouble() < 0.5) {
						randomSplit.add(key);
					}
				}
			}

			Pair<HashSet<Integer>, HashSet<Integer>> split = ExchangeAlgo.getExchangeWordSplit(eventsByX.keySet(), eventsByX, randomSplit);

			// Weighted average of the two halves' entropies (weights = counts).
			double[] dist1 = mergedEntropy(counts, split.getFirst());
			double[] dist2 = mergedEntropy(counts, split.getSecond());
			double runEntropy = (dist1[1] * dist1[0] + dist2[1] * dist2[0]) / (dist1[1] + dist2[1]);
			if (runEntropy < newEntropy) {
				newEntropy = runEntropy;
			}
		}
		return new Pair<Double,Double>(initialEntropy, newEntropy);
	}

	/**
	 * Entry point. Reads binary FLM training data, accumulates per-tag word
	 * counts in parallel, then for each split size 2^0 .. 2^numSplits measures
	 * (over nrExperiments random trials) the entropy reduction achieved by one
	 * run of the exchange algorithm, and prints mean and standard deviation.
	 *
	 * @param args command-line flags; see {@link Options}
	 */
	public static void main(String[] args) {
        OptionParser optParser = new OptionParser(Options.class);
        final Options opts = (Options) optParser.parse(args, true);

		JobManager.initialize(opts.jobs);
		JobManager manager = JobManager.getInstance();
		Thread thread = new Thread(manager, "Job Manager");
		thread.setDaemon(true);
		thread.start();

		Experiment.initialize(opts.config);
		Experiment experiment = Experiment.getInstance();
		FactorTupleDescription desc = experiment.getTupleDescription();
		final byte tagFactorIdx = desc.getFactorIndex("T");
		final byte mainFactorIdx = desc.getMainFactorIndex();

		Dictionary tagDict = desc.getDictionary("T");
		// tag2wordCounts[t]: word id -> number of times the word followed tag t.
		final Long2IntMap tag2wordCounts[] = new  Long2IntMap[tagDict.size()];
		for(int i=0; i<tag2wordCounts.length; ++i) {
			tag2wordCounts[i] = new Long2IntMap();
		}

		String[] dataFiles = opts.data.split(",");

		// Phase 1: one job per data file accumulates counts into the shared maps.
		JobGroup countGroup = manager.createJobGroup("getting counts");

		for(int i=0; i<dataFiles.length; ++i) {
			final String fname = dataFiles[i];
			Runnable run = new Runnable() {

				@Override
				public void run() {
					try {
						FileInputStream in = new FileInputStream(fname);
						// BUGFIX: the original closed the channel only on the
						// success path, leaking the descriptor if reading threw.
						try {
							FileChannel channel = in.getChannel();
							TrainingDataReader reader = new OnDiskTrainingDataReader(channel);
							ReadableTrainingData inputData = new ReadableTrainingData(reader);
							while(inputData.hasNext()) {
								TrainingDataBlock block = inputData.next();
								for(ContextFuturesPair pair : block) {
									long[] context = pair.getContext().data;
									// Tag of the most recent context position.
									int tag = FactorTuple.getValue(context[context.length-1], tagFactorIdx);
									Long2IntMap wordCounts = tag2wordCounts[tag];
									// Maps are shared between file-reading jobs.
									synchronized(wordCounts) {
										for(TupleCountPair tc : pair.getFutures()) {
											int word = FactorTuple.getValue(tc.tuple, mainFactorIdx);
											wordCounts.addAndGet(word, tc.count);
										}
									}
								}
							}
						} finally {
							// Closing the stream also closes its channel.
							in.close();
						}
					} catch(IOException e) {
						e.printStackTrace();
					}
				}
			};
			Job job = new Job(run, "a job");
			manager.addJob(countGroup, job);
		}
		countGroup.join();

		final double avgReductions[] = new double[opts.numSplits+1];
		final double sigmas[] = new double[opts.numSplits+1];
		final int splits[] = new int[opts.numSplits+1];

		// Split sizes grow geometrically: 1, 2, 4, ..., 2^numSplits.
		for(int split=0; split<=opts.numSplits; ++split) {
			splits[split] = 1 << split;
		}

		// Phase 2: BUGFIX — the original re-added these jobs to the already-joined
		// "getting counts" group; use a fresh group so the phases stay separate.
		JobGroup expGroup = manager.createJobGroup("experiments");

		for(int i=0; i<=opts.numSplits; ++i) {
			final int split = i;

			Runnable run = new Runnable() {

				@Override
				public void run() {
					int nway = splits[split];
					double entropyReductions[] = new double[opts.nrExperiments];
					double avgReduction = 0;
					for(int k=0; k<opts.nrExperiments; ++k) {
						// nway == 1 means "no split": use the raw counts directly.
						Long2IntMap counts[] = nway == 1 ? tag2wordCounts : splitCounts(tag2wordCounts, nway);
						Pair<Double,Double> pair = doExchange(counts, 1);
						entropyReductions[k] = pair.getFirst() - pair.getSecond();
						avgReduction += entropyReductions[k];
					}
					avgReduction /= entropyReductions.length;
					avgReductions[split] = avgReduction;
					// Sample standard deviation (n-1 denominator); the original
					// produced NaN for a single experiment — report 0 instead.
					double sd = 0;
					for(int k=0; k<entropyReductions.length; ++k) {
						double d = avgReduction - entropyReductions[k];
						sd += d * d;
					}
					sd = entropyReductions.length > 1
							? Math.sqrt(sd / (entropyReductions.length - 1))
							: 0;
					sigmas[split] = sd;
				}

			};
			Job job = new Job(run, "a job");
			manager.addJob(expGroup, job);
		}
		expGroup.join();

		for(int split=0; split<=opts.numSplits; ++split) {
			final int nway = splits[split];
			double avgReduction = avgReductions[split];
			double sigma = sigmas[split];
			System.out.printf("%d-way split: reduction=%f, standard deviation = %f\n", nway, avgReduction, sigma);
		}
	}

}
