/**
 * 
 */
package edu.umd.clip.lm.tests;

import edu.berkeley.nlp.util.*;

import java.io.*;
import java.nio.ByteBuffer;
import java.util.*;

import edu.umd.clip.lm.model.*;
import edu.umd.clip.jobs.*;
import edu.umd.clip.lm.factors.*;
import edu.umd.clip.lm.storage.AbstractProbTreeStorage;
import edu.umd.clip.lm.storage.BDBProbTreeStorage;
import edu.umd.clip.lm.util.ProbMath;
import edu.umd.clip.lm.util.tree.BinaryTree;

import java.lang.management.*;

import com.sleepycat.je.DatabaseException;
import com.sleepycat.je.Environment;

/**
 * @author Denis Filimonov <den@cs.umd.edu>
 *
 */
public class TestDistributions {
	/**
	 * Command-line options, parsed reflectively by {@link OptionParser} from the
	 * {@code @Option} annotations below.
	 */
	public static class Options {
        @Option(name = "-config", required = true, usage = "XML config file")
		public String config;
        @Option(name = "-jobs", usage = "number of concurrent jobs (default: 1)")
        public int jobs = 1;
        @Option(name = "-lm", required = false, usage = "LM ID to train (default: " + LanguageModel.PRIMARY_LM_ID + ")")
		public String lm = LanguageModel.PRIMARY_LM_ID;        
        @Option(name = "-runs", usage = "number of tests (default: 10)")
        public int runs = 10;
        @Option(name = "-meminfo", usage = "track memory usage (default: false)")
        public boolean meminfo = false;
	}

	/**
	 * Sanity-checks stored LM distributions: runs {@code -runs} concurrent tests,
	 * each of which picks a random (non-backoff) leaf of the LM's history tree,
	 * reconstructs the interpolated word distribution for that history by merging
	 * the per-cluster probability trees along the leaf-to-root path, and verifies
	 * that the total probability mass (after backoff renormalization) is
	 * approximately 1.0. Prints min/max/avg of the per-run totals at the end.
	 *
	 * @param args command line, parsed into {@link Options}
	 * @throws DatabaseException on Berkeley DB errors while opening/reading storage
	 */
	public static void main(String[] args) throws DatabaseException {
		// Used only for the optional -meminfo heap-usage snapshots below.
		final MemoryMXBean memoryBean = ManagementFactory.getMemoryMXBean();
		
		MemoryUsage memuse;
		
        OptionParser optParser = new OptionParser(Options.class);
        final Options opts = (Options) optParser.parse(args, true);

		if (opts.meminfo) {
			memuse = memoryBean.getHeapMemoryUsage();
			// 1048576 = bytes per MiB
			System.out.printf("MEMUSE: initial: %dM/%dM\n", memuse.getUsed()/1048576, memuse.getMax()/1048576);
		}

		// Experiment is a singleton configured from the XML file given by -config.
		Experiment.initialize(opts.config);
		final Experiment experiment = Experiment.getInstance();

		if (opts.meminfo) {
			memuse = memoryBean.getHeapMemoryUsage();
			System.out.printf("MEMUSE: after Experiment.initialize(): %dM/%dM\n", memuse.getUsed()/1048576, memuse.getMax()/1048576);
		}
		
		// Start the job manager with -jobs worker slots on a daemon thread so the
		// JVM can exit normally once main() returns.
		JobManager.initialize(opts.jobs);
		Thread thread = new Thread(JobManager.getInstance(), "Job Manager");
		thread.setDaemon(true);
		thread.start();

		// Precompute prefix structures needed by the model (project-specific;
		// presumably required before distributions can be queried — TODO confirm).
		experiment.buildPrefixes();
		experiment.buildWordPrefixes();

		// Install the shared "empty" prob tree used as a sentinel/default by
		// DecoderCompactProbTree (assumption based on setter name — verify).
		DecoderCompactProbTree emptyTree = DecoderCompactProbTree.constructEmptyTree(Experiment.getInstance().getHFT().getTree());
		DecoderCompactProbTree.setEmptyTree(emptyTree);
		
		// Berkeley DB JE environment rooted at ./data. NOTE(review): the meaning of
		// the boolean flag (read-only? allow-create?) is not visible here — confirm
		// against BDBProbTreeStorage.createEnvironment.
		Environment env = BDBProbTreeStorage.createEnvironment("data", false);

		final LanguageModel lm = experiment.getLM(opts.lm);
		final BDBProbTreeStorage storage = new BDBProbTreeStorage(env);
		storage.open(lm.getId(), lm.getIdNum(), false);

		// The set of all overt factor tuples — effectively the vocabulary the
		// distributions range over (TODO confirm terminology against FactorTuple).
		final Collection<FactorTuple> allOvertFactors = experiment.getTupleDescription().getAllOvertFactors().keySet();
		// One result slot per run; each Test job writes only its own index, so no
		// synchronization is needed on this array.
		final double probs[] = new double[opts.runs];
		
		//experiment.closeXML();
		
		// One test run. Executed concurrently via the JobManager.
		class Test implements Runnable {
			int num;	// index into probs[] this run writes its result to
			public Test(int num) {
				this.num = num;
			}
			public void run() {
				
				// Loop only to retry when the random descent lands on a backoff
				// leaf; a successful run exits via the `return` at the bottom.
				while(true) {
					// Random walk from the history-tree root down to a leaf
					// (each step picks left/right with probability 0.5).
					// NOTE(review): Math.random() is shared across worker threads;
					// ThreadLocalRandom would avoid contention, though correctness
					// is unaffected here.
					BinaryTree<HistoryTreePayload> tree = lm.getHistoryTree();
					while(!tree.isLeaf()) {
						if (Math.random() < 0.5) {
							tree = tree.getLeft();
						} else {
							tree = tree.getRight();
						}
					}

					if (tree.getPayload().isBackoff) {
						// ignore backoff clusters
						continue;
					}
					
					// Per level on the leaf-to-root path: (word -> prob tree) map
					// paired with that level's interpolation weight.
					LinkedList<Pair<HashMap<FactorTuple,OnDiskCompactProbTree>,Double>> distributions = 
						new LinkedList<Pair<HashMap<FactorTuple,OnDiskCompactProbTree>,Double>>();
					
					double lambda = 1.0;	// probability mass remaining for this level and its ancestors
					double backoff = 0.0;	// accumulated backoff mass across levels
					BinaryTree<HistoryTreePayload> leaf = tree;	// remember leaf for the report below
					
					// Walk leaf -> root, loading each cluster's per-word prob trees
					// from storage and recording the level's interpolation weight.
					while(tree != null) {
						int clusterid = tree.getPayload().clusterid;
						HashMap<FactorTuple,OnDiskCompactProbTree> dist = new HashMap<FactorTuple,OnDiskCompactProbTree>();

						for(FactorTuple word : allOvertFactors) {
							OnDiskCompactProbTree probTree = storage.getProbTree(lm.getIdNum(), clusterid, word);
							if (probTree != null) {
								dist.put(word, probTree);
							}
						}
						// This level's weight is the remaining mass times its lambda.
						double scale = lambda * tree.getPayload().lambda;
						backoff += scale * tree.getPayload().backoff;
						
						distributions.add(new Pair<HashMap<FactorTuple,OnDiskCompactProbTree>,Double>(dist, scale));
						// Mass not claimed by this level flows up to the ancestors.
						lambda *= 1.0 - tree.getPayload().lambda;
						
						tree = tree.getParent();
					}
					 
					// For each word, merge its scaled per-level trees and add the
					// merged tree's mass to the running total.
					double totalProb = 0.0;
					for(FactorTuple overtFactors : allOvertFactors) {
						ArrayList<OnDiskCompactProbTree> trees = new ArrayList<OnDiskCompactProbTree>(distributions.size());
						for(Pair<HashMap<FactorTuple,OnDiskCompactProbTree>,Double> pair : distributions) {
							HashMap<FactorTuple,OnDiskCompactProbTree> dist = pair.getFirst();
							double scale = pair.getSecond();
							OnDiskCompactProbTree probTree = dist.get(overtFactors);
							if (probTree != null) {
								// NOTE(review): scale() mutates the loaded tree in
								// place — safe only because each run loads its own
								// copies from storage (appears to be the case here).
								probTree.scale(scale);
								trees.add(probTree);
							}
						}
						OnDiskCompactProbTree probTree = OnDiskCompactProbTree.merge(trees);
						if (probTree == null) {
							// Word absent at every level: zero probability. Debug
							// output left commented out intentionally.
							//FactorTuple tuple = experiment.getTupleDescription().createTuple();
							//tuple.setOvertValues(overtFactors.factors);
							//System.out.printf("zero prob for %s\n", tuple.toStringNoNull());
						} else {
							double prob = probTree.getTotalProb();
							totalProb += prob;
						}
					}
					// Renormalize by the non-backoff mass; a well-formed model
					// should yield ~1.0. Report deviations per cluster.
					probs[num] = totalProb / (1 - backoff);
					if (!ProbMath.approxEqual(probs[num], 1.0)) {
						System.out.printf("[%d] prob = %g, backoff = %g\n", leaf.getPayload().clusterid, probs[num], backoff);
					}
					return;
				}
				
			}
		}
		
		// Enqueue one Test job per run and block until all complete.
		JobManager manager = JobManager.getInstance();
		JobGroup group = manager.createJobGroup("test group");
		
		for(int i=0; i<opts.runs; ++i) {
			Job job = new Job(new Test(i), "test");
			manager.addJob(group, job);
		}
		group.join();
		
		// Summarize. NOTE(review): min starts at 100 on the assumption that no
		// normalized total exceeds 100; Double.POSITIVE_INFINITY would be safer.
		double min = 100;
		double max = 0;
		double sum = 0;
		
		for(double prob : probs) {
			if (prob < min) min = prob;
			if (prob > max) max = prob;
			sum += prob;
		}
		
		System.out.printf("Min: %e, Max: %e, Avg: %e\n", min, max, sum / probs.length);
		// NOTE(review): storage and env are never closed; acceptable for a
		// short-lived test utility, but consider closing them in a finally block.
	}

}
