/**
 * Reports the training-context entropy H(ctx) and the dev-data conditional
 * cross-entropy H(w|ctx) for every (overt order, hidden order) context
 * configuration of the language model, using one-count smoothing with a
 * unigram backoff.
 */
package edu.umd.clip.lm.programs;

import java.io.*;
import java.nio.channels.*;
import java.util.*;

import edu.berkeley.nlp.util.*;

import edu.umd.clip.lm.model.*;
import edu.umd.clip.lm.model.data.Context;
import edu.umd.clip.lm.model.data.ContextFuturesPair;
import edu.umd.clip.lm.model.data.FilteredReadableTrainingData;
import edu.umd.clip.lm.model.data.MaskedFuturesTrainingDataFilter;
import edu.umd.clip.lm.model.data.OnDiskTrainingDataReader;
import edu.umd.clip.lm.model.data.ReadableTrainingData;
import edu.umd.clip.lm.model.data.TrainingDataBlock;
import edu.umd.clip.lm.model.data.TrainingDataFilter;
import edu.umd.clip.lm.model.data.TrainingDataReader;
import edu.umd.clip.lm.model.data.TrainingDataUtil;
import edu.umd.clip.lm.model.data.TupleCountPair;
import edu.umd.clip.lm.model.training.*;
import edu.umd.clip.lm.util.*;
import edu.umd.clip.smoothing.*;

/**
 * @author Denis Filimonov <den@cs.umd.edu>
 *
 */
public class TagInfo {
	public static class Options {
        @Option(name = "-config", required = true, usage = "XML config file")
		public String config;
        @Option(name = "-train-data", required = true, usage = "binary data file")
		public String trainData;
        @Option(name = "-dev-data", required = true, usage = "binary data file")
		public String devData;
        @Option(name = "-cost", required = false, usage = "Cost function: (overt|joint) default: overt")
		public String cost = "overt";
	}

	private static class Entropies {
		double conditionalEntropy = 0;
		double jointEntropy = 0;
		double contextEntropy = 0;
		
		public String toString() {
			return String.format("H(ctx) = %g, H(w|ctx) = %g", contextEntropy, conditionalEntropy);
		}
	}
	
	private static final double ONE_COUNT_GAMMA = 15.0;
	private static final double ONE_COUNT_BETA = 1.0;
	
	private static Entropies computeConditionalEntropy(ReadableTrainingData trainData, long totalTrainCount,
			Map<Context,TupleCountPair[]> devCounts, Long2DoubleMap unigramDist) throws IOException {
		double conditionalEntropy = 0;
		//double jointEntropy = 0;
		double contextEntropy = 0;
		
		final double revTotalCount = 1.0 / totalTrainCount;
		long devTotalCount = 0;
		
		while(trainData.hasNext()) {
			TrainingDataBlock block = trainData.next();
			for(ContextFuturesPair pair : block) {
				//final double revCtxTotalCount = 1.0 / pair.getTotalCount();
				contextEntropy -= pair.getTotalCount() * revTotalCount * ProbMath.log2(pair.getTotalCount() * revTotalCount);
				
				TupleCountPair[] devFutures = devCounts.get(pair.getContext());
				if (devFutures == null) continue;
				
				// compute smoothed distribution on the train data
				SmootherProducer producer = new SmootherProducer();
				
				Long2IntMap trainCounts = new Long2IntMap(pair.getFutures().length);
				
				for(TupleCountPair tc : pair.getFutures()) {
					producer.addCount(tc.count);
					trainCounts.addAndGet(tc.tuple, tc.count);
					
					//final double totalProb = tc.count * revTotalCount;
					//conditionalEntropy -= totalProb * ProbMath.log2(tc.count * revCtxTotalCount);
					//jointEntropy -= totalProb * ProbMath.log2(totalProb);
				}
				
				Smoother smoother = new OneCountSmoother(producer, ONE_COUNT_BETA, ONE_COUNT_GAMMA);
				
				double likelihood = 0;
				for(TupleCountPair tc : devFutures) {
					int trainCount = trainCounts.get(tc.tuple);
					double prob = smoother.getProb(trainCount);
					double backoffProb = smoother.getBackoffProb() * unigramDist.get(tc.tuple);
					
					double totalTrainProb = prob + backoffProb;
					if (totalTrainProb <= 0) continue;
					
					likelihood -= tc.count * ProbMath.log2(totalTrainProb);
					devTotalCount += tc.count;
				}
				
				conditionalEntropy += likelihood;
			}
		}
		
		conditionalEntropy /= devTotalCount;
		
		Entropies result = new Entropies();
		result.conditionalEntropy = conditionalEntropy;
		//result.jointEntropy = jointEntropy;
		result.contextEntropy = contextEntropy;
		return result;
	}
	
	/**
	 * @param args
	 * @throws ClassNotFoundException 
	 * @throws IOException 
	 */
	public static void main(String[] args) throws IOException, ClassNotFoundException {
        OptionParser optParser = new OptionParser(Options.class);
        final Options opts = (Options) optParser.parse(args, true);

		Experiment.initialize(opts.config);
		Experiment experiment = Experiment.getInstance();
		experiment.getTupleDescription();
		
		long overtMask = Experiment.getInstance().getTupleDescription().getOvertFactorsMask();
		TrainingDataFilter filter = new MaskedFuturesTrainingDataFilter(overtMask);

		FileChannel trainDataChannel = new FileInputStream(opts.trainData).getChannel();
		TrainingDataReader trainDataReader = new OnDiskTrainingDataReader(trainDataChannel);
		ReadableTrainingData trainData; 

		FileChannel devDataChannel = new FileInputStream(opts.devData).getChannel();
		TrainingDataReader devDataReader = new OnDiskTrainingDataReader(devDataChannel);
		ReadableTrainingData devData; 


		if (opts.cost.equals("joint")) {
			trainData = new ReadableTrainingData(trainDataReader);
			devData = new ReadableTrainingData(devDataReader);
		} else {
			trainData = new FilteredReadableTrainingData(trainDataReader, filter);
			devData = new FilteredReadableTrainingData(devDataReader, filter);
		}
		
		LanguageModel lm = experiment.getLM();
		
		Long2DoubleMap unigramDist = new Long2DoubleMap(50000);
		long totalTrainCount = 0;
		while(trainData.hasNext()) {
			TrainingDataBlock block = trainData.next();
			for(ContextFuturesPair pair : block) {
				for(TupleCountPair tc : pair.getFutures()) {
					unigramDist.addAndGet(tc.tuple, tc.count);
					totalTrainCount += tc.count;
				}
			}
		}
		for(Long2DoubleMap.Iterator it = unigramDist.iterator(); it.hasNext();) {
			Long2DoubleMap.Entry entry = it.next();
			entry.setValue(entry.getValue() / totalTrainCount);
		}
		
		System.out.println("Cost function: " + opts.cost);
		
		for(int overtOrder = 2; overtOrder <= lm.getOrder(); ++overtOrder) {
			for(int hiddenOrder = 1; hiddenOrder <= overtOrder; ++hiddenOrder) {
				trainDataChannel = new FileInputStream(opts.trainData).getChannel();
				trainDataReader = new OnDiskTrainingDataReader(trainDataChannel);
				//trainData = new ReadableTrainingData(trainDataReader);
				trainData = new FilteredReadableTrainingData(trainDataReader, filter);
				
				devDataChannel = new FileInputStream(opts.devData).getChannel();
				devDataReader = new OnDiskTrainingDataReader(devDataChannel);
				devData = new FilteredReadableTrainingData(devDataReader, filter);
				//trainData.reset();
				//devData.reset();
				
				ReadableTrainingData d1;
				ReadableTrainingData d2;
				if (overtOrder < lm.getOvertOrder() || hiddenOrder < lm.getHiddenOrder()) {
					d1 = TrainingDataUtil.makeContextReducedTrainingData(trainData, overtOrder, hiddenOrder);
					d2 = TrainingDataUtil.makeContextReducedTrainingData(devData, overtOrder, hiddenOrder);
				} else {
					d1 = trainData;
					d2 = devData;
				}
				HashMap<Context,TupleCountPair[]> devCounts = new HashMap<Context,TupleCountPair[]>(10000);
				while(d2.hasNext()) {
					TrainingDataBlock block = d2.next();
					for(ContextFuturesPair pair : block) {
						devCounts.put(pair.getContext(), pair.getFutures());
					}
				}
				
				Entropies entropies = computeConditionalEntropy(d1, totalTrainCount, devCounts, unigramDist);
				System.out.printf("ctx = w_{%d} t_{%d} : %s\n", overtOrder-1, hiddenOrder-1, entropies);
			}
		}
	}

}
