/**
 * 
 */
package edu.umd.clip.lm.model.training;

import java.io.*;
import java.nio.channels.*;
import java.util.*;
import java.util.concurrent.locks.*;

import com.sleepycat.je.*;

import edu.berkeley.nlp.util.*;
import edu.umd.clip.jobs.*;
import edu.umd.clip.lm.factors.FactorTuple;
import edu.umd.clip.lm.model.*;
import edu.umd.clip.lm.model.data.*;
import edu.umd.clip.lm.util.*;
import edu.umd.clip.lm.util.tree.*;

/**
 * @author Denis Filimonov <den@cs.umd.edu>
 *
 */
public class NewEMSmoother {
	// Persistent per-(clusterid, data split) count distributions (BDB-backed).
	private SmoothingDataStorage distStorage;
	// History-clustering decision tree; its leaves are the clusters being smoothed.
	private BinaryTree<HistoryTreePayload> theTree;
	private LanguageModel lm;
	// Index from clusterid -> tree node; entries may be null for unused ids.
	private BinaryTree<HistoryTreePayload> nodes[];
	// Number of smoothing data splits (cross-validation folds).
	private int nrDataFiles;
	// Total number of nodes in theTree.
	private int nrClusters;
	// One past the largest clusterid seen in theTree (used for array sizing).
	private int maxClusterId;
	private Set<FactorTuple> allOvertFactors;
	// Directory holding the per-level, per-split training data files.
	private final File dataDir;
	private Environment env;
	// If non-null, lambdas are dumped to files with this prefix after each EM iteration.
	private String dumpLambdasPrefix;
	
	private static final double MIN_LAMBDA = 1e-5;
	// interpolate lambdas across folds by geometric (true) vs arithmetic (false) mean
	private static final boolean geomMean = true;

	@SuppressWarnings("unchecked")
	public NewEMSmoother(LanguageModel lm, File dbDir, final File dataDir, boolean clear, String dumpLambdasPrefix) throws DatabaseException {
		this.lm = lm;
		this.theTree = lm.getHistoryTree();
		this.dataDir = dataDir;
		this.dumpLambdasPrefix = dumpLambdasPrefix;
		
		maxClusterId = 0;
		nrClusters = 0;
		for(BinaryTreeIterator<HistoryTreePayload> it = theTree.getPostOrderIterator(); it.hasNext(); ) {
			int clusterId = it.next().clusterid;
			if (clusterId > maxClusterId) maxClusterId = clusterId;
			++nrClusters;
		}
		
		++maxClusterId;
		nodes = new BinaryTree[maxClusterId];
		for(BinaryTreeIterator<HistoryTreePayload> it = theTree.getPostOrderIterator(); it.hasNext(); ) {
			BinaryTree<HistoryTreePayload> node = it.nextNode();
			nodes[node.getPayload().clusterid] = node;
		}
		
		Experiment exp = Experiment.getInstance();
		this.nrDataFiles = exp.getFiles().getSmoothData().size();
		allOvertFactors = exp.getTupleDescription().getAllOvertFactors().keySet(); 
		
		EnvironmentConfig envConf = new EnvironmentConfig();
		envConf.setAllowCreate(true);
		envConf.setSharedCache(true);
		envConf.setTransactional(false);
		envConf.setReadOnly(false);
		envConf.setCachePercent(20);
		envConf.setConfigParam("je.log.fileMax", Integer.toString(100*1024*1024));
		
		if (!dbDir.isDirectory()) {
			dbDir.mkdirs();
		}
		env = new Environment(dbDir, envConf);

		distStorage = new BDBSmoothingDataStorage(env, clear);
		
	}
	
	/**
	 * Populates the distribution storage by repeatedly splitting the training
	 * data down the history tree, one level at a time, until no new nodes are
	 * produced.  Each data split (fold) is processed by its own job.
	 *
	 * @param startLevel tree level whose data files already exist in dataDir;
	 *        pass 0 to (re)build them from the raw smoothing data first
	 * @throws Exception if job creation or the level-0 data generation fails
	 */
	public void initialize(final int startLevel) throws Exception 
	{
		assert(dataDir.isDirectory());
		
		Experiment exp = Experiment.getInstance();

		Experiment.Files files = exp.getFiles();
		
		JobManager manager = JobManager.getInstance();
		
		if (startLevel == 0) {
			// initialize from the training data
			long overtMask = Experiment.getInstance().getTupleDescription().getOvertFactorsMask();
			TrainingDataFilter filter = new MaskedFuturesTrainingDataFilter(overtMask);

			for(byte split = 0; split < files.getSmoothData().size(); ++split) {
				final String[] filenames = files.getSmoothDataFiles(split);

				TrainingDataUtil.combineAndReduceContext(filenames, 
						new File(dataDir, makeDataFilename(0, split)).getAbsolutePath(), 
						lm.getOvertOrder(), lm.getHiddenOrder(), 
						lm.getHistoryTree().getPayload().clusterid, filter);

			}
		}
		
		JobGroup group = manager.createJobGroup("populating data");
		
		for(byte dataId=0; dataId < nrDataFiles; ++dataId) {
			final byte id = dataId;
			
			Runnable run = new Runnable() {

				@Override
				public void run() {
					try {
						int level = startLevel;
						while(true) {
							File dataFile = new File(dataDir, makeDataFilename(level, id));
							if (!dataFile.isFile()) {
								System.err.printf("Cannot find file %s, ", dataFile.getAbsolutePath());
								if (level > 0) {
									// fall back to rebuilding everything from scratch
									System.err.println("starting from level 0");
									initialize(0);
									return;
								}
								System.err.println("bailing out");
								throw new Error("data file "+dataFile.getAbsolutePath()+" not found");
							}

							// Close input and output even if populateLevel throws
							// (the original leaked both channels on exception).
							int numNewNodes;
							FileChannel inputChannel = new FileInputStream(dataFile).getChannel();
							try {
								TrainingDataNodeReader reader = new EagerTrainingDataNodeReader(new OnDiskTrainingDataNodeReader(inputChannel));

								RandomAccessFile outFile = new RandomAccessFile(new File(dataDir, makeDataFilename(level+1, id)), "rw");
								try {
									outFile.getChannel().truncate(0);

									OnDiskTrainingDataNodeWriter writer = new OnDiskTrainingDataNodeWriter(outFile.getChannel());

									numNewNodes = populateLevel(id, reader, writer);
								} finally {
									outFile.close();
								}
							} finally {
								inputChannel.close();
							}

							// this level's input is no longer needed once it has
							// been split (delete after close for portability)
							dataFile.delete();
							if (numNewNodes == 0) break;
							++level;
						}

					} catch(Exception e) {
						// jobs cannot propagate checked exceptions; log and give up on this fold
						e.printStackTrace();
					}
				}
			};
			manager.addJob(group, new Job(run, "data #" + Byte.toString(id)));
		}
		group.join();
		
		distStorage.sync();
	}
	
	/**
	 * Consumes one level of training data nodes: leaves get their count
	 * distributions stored in distStorage, inner nodes have their data split
	 * between their two children and written out for the next level.
	 *
	 * @param dataId data split (fold) being processed
	 * @param reader source of this level's data nodes
	 * @param writer sink for the next level's data nodes
	 * @return number of data nodes emitted for the next level (0 when done)
	 * @throws IOException on read/write failure
	 */
	private int populateLevel(byte dataId, TrainingDataNodeReader reader, TrainingDataNodeWriter writer) throws IOException {
		int nextLevelNodeCount = 0;
		
		int nrPopulated = 0;
		long totalCount = 0;
		long leafDataCount = 0;
		long backoffDataCount = 0;
		
		//reader.reset();
		for(ReadableTrainingDataNode nodeData = reader.getNext(); nodeData != null; nodeData = reader.getNext()) {
			BinaryTree<HistoryTreePayload> oldLeaf = nodes[nodeData.getNodeId()];

			
			Long2IntMap counts = new Long2IntMap();
			
			if (oldLeaf.isLeaf()) {
				// compute counts
				while(nodeData.getData(0).hasNext()) {
					nodeData.getData(0).next().addCounts(counts);
				}
				// backoff leaves are only tallied; the continue also skips the
				// putDistribution call below, so they are never stored
				if (oldLeaf.getPayload().isBackoff) {
					backoffDataCount += counts.sumValues();
					continue;
				} 

				leafDataCount += counts.sumValues();
			} else {
				// inner node: route its data to the two children for the next level
				nextLevelNodeCount += 2;
				
				BinaryTree<HistoryTreePayload> left = oldLeaf.getLeft();
				BinaryTree<HistoryTreePayload> right = oldLeaf.getRight();
				
				int leftNodeId = left.getPayload().clusterid;
				int rightNodeId = right.getPayload().clusterid;
				
				WritableTrainingDataNode leftDataNode = writer.createNode(leftNodeId, 1);
				WritableTrainingDataNode rightDataNode = writer.createNode(rightNodeId, 1);
				
				writer.add(leftDataNode);
				writer.add(rightDataNode);
				
				// presumably splitData also accumulates this node's counts into
				// `counts` (the map is summed below) — TODO confirm
				TrainingDataUtil.splitData(nodeData.getData(0), oldLeaf.getPayload().question, 
						rightDataNode.getData(0), leftDataNode.getData(0), counts);
	
			}
			++nrPopulated;
			CountDistribution dist = new CountDistribution(counts, counts.sumValues());
			distStorage.putDistribution(oldLeaf.getPayload().clusterid, dataId, dist);
			totalCount += dist.getTotalCount();
		}	
		
		System.out.printf("dataid %d, level count: %d, in leaves: %d, in backoff: %d, clusters: %d\n", 
				dataId, totalCount, leafDataCount, backoffDataCount, nrPopulated);
		return nextLevelNodeCount;
	}
	
	/**
	 * Estimates interpolation lambdas by cross-validation: one SmoothingRun per
	 * dev fold (unless singleFold >= 0 selects exactly one), then combines the
	 * per-fold lambdas by geometric or arithmetic mean (per the geomMean flag)
	 * and installs them in the history tree.
	 *
	 * @param maxIter maximum EM iterations per fold
	 * @param bucketSize if > 1, tie lambdas of low-count clusters in buckets
	 * @param pruningThreshold threshold passed to each fold's distribution cache
	 * @param useLambdas start EM from the tree's current lambdas instead of near-0.5 random values
	 * @param singleFold if >= 0, run only this fold (debugging)
	 * @param dummySmoother use a dummy smoother for dev distributions and skip dev entropy (debugging)
	 * @param discountScale discounting scale forwarded to the dev distribution builder
	 */
	public void doCrossValidatingSmoothing(int maxIter, int bucketSize, int pruningThreshold, 
			boolean useLambdas, byte singleFold, boolean dummySmoother, double discountScale) 
	{
		SmoothingRun runs[] = new SmoothingRun[nrDataFiles];
		
		for(byte devId = 0; devId < nrDataFiles; ++devId) {
			if (singleFold >= 0 && devId != singleFold) continue;
			
			// With > 2 folds: one dev split, one heldout split, the rest train.
			// With <= 2 folds the heldout split doubles as the dev split (debug).
			byte trainIds[] = new byte[nrDataFiles > 2 ? nrDataFiles-2 : nrDataFiles - 1];
			byte devIds[] = new byte[1];
			byte heldoutId;
			{
				byte i = 0;
				if (nrDataFiles > 2) {
					heldoutId = (byte) ((devId + 1) % nrDataFiles);
				} else {
					heldoutId = devId;
					System.err.println("NOTE: heldout data is the same as the dev data (debug only)");
				}
				devIds[0] = devId;
				for(byte id = 0; id < nrDataFiles; ++id) {
					if (id != devId && id != heldoutId) trainIds[i++] = id;
				}
			}
			System.out.printf("=================== Run %d/%d =====================\n", (devId+1), nrDataFiles);
			
			long startTime = System.currentTimeMillis();
			
			runs[devId] = new SmoothingRun(trainIds, devIds, heldoutId, pruningThreshold);
			runs[devId].initialize(useLambdas, dummySmoother, discountScale);
			
			long endTime = System.currentTimeMillis();
			
			System.out.printf("initialization took %g seconds\n", 0.001 * (endTime - startTime));

			long emStartTime = System.currentTimeMillis();
			
			double entropy = runs[devId].computeEntropy(heldoutId);
			
			System.out.printf("Initial entropy: %g (PPL=%g)\n", entropy, Math.pow(10, entropy));
			
			if (dummySmoother) {
				System.out.println("Using dummy smoother, skipping dev entropy");
			} else {
				entropy = runs[devId].computeDevEntropy(heldoutId);
				
				System.out.printf("Dev data entropy: %g (PPL=%g)\n", entropy, Math.pow(10, entropy));
			}
			runs[devId].run(maxIter, bucketSize);
			
			long emEndTime = System.currentTimeMillis();
			
			System.out.printf("run %d took %g seconds\n", (devId+1), 0.001 * (emEndTime - emStartTime));

			runs[devId].clean();
		}
		
		// Combine per-fold lambdas.  For the geometric mean the accumulator
		// starts at 1 (product identity); for the arithmetic mean at 0.
		double lambdas[] = new double[maxClusterId];
		
		if (geomMean) {
			Arrays.fill(lambdas, 1.0);
		}
		
		for(int fold=0; fold<runs.length; ++fold) {
			// single-fold mode: copy that fold's lambdas verbatim (runs[] entries
			// for the other folds are null and must be skipped)
			if (singleFold >= 0) {
				if (fold == singleFold) {
					System.arraycopy(runs[fold].lambdas, 0, lambdas, 0, runs[fold].lambdas.length);
				}
				continue;
			}
			double[] l = runs[fold].lambdas;
			if (geomMean) {
				for(int clusterid=0; clusterid<maxClusterId; ++clusterid) {
					if (nodes[clusterid] == null) continue;
					lambdas[clusterid] *= Math.pow(l[clusterid], 1.0 / runs.length); 
				}
			} else {
				for(int clusterid=0; clusterid<maxClusterId; ++clusterid) {
					if (nodes[clusterid] == null) continue;
					lambdas[clusterid] += l[clusterid] / runs.length; 
				}
			}
		}
	
		if (singleFold < 0) {
			// compute deviation of per-fold lambdas around the combined mean
			double sigma = 0;
			for(int fold=0; fold<runs.length; ++fold) {
				double[] l = runs[fold].lambdas;
				for(int clusterid=0; clusterid<maxClusterId; ++clusterid) {
					if (nodes[clusterid] == null) continue;
					sigma += (lambdas[clusterid] - l[clusterid])*(lambdas[clusterid] - l[clusterid]); 
				}
			}
			sigma /= runs.length * nrClusters;
			sigma = Math.sqrt(sigma);
			System.out.printf("Lambda standard deviation = %g\n", sigma);
		}
		
		// Install the combined lambdas into the tree; the root always gets
		// lambda = 1 and is marked as the top node.
		for(int i=0; i<nodes.length; ++i) {
			if (nodes[i] != null) {
				nodes[i].getPayload().lambda = lambdas[i];
				nodes[i].getPayload().top = false;
			}
		}
		
		theTree.getPayload().top = true;
		theTree.getPayload().lambda = 1.0;

		/*
		if (pruneNodes) {
			BinaryTreeIterator<HistoryTreePayload> iterator = theTree.getPostOrderIterator();
			int nrPruned = 0;
			for(; iterator.hasNext(); ) {
				BinaryTree<HistoryTreePayload> node = iterator.nextNode();
				BinaryTree<HistoryTreePayload> left = node.getLeft();
				BinaryTree<HistoryTreePayload> right = node.getRight();
				if (left != null && left.isLeaf() && right != null && right.isLeaf()) {
					if ((left.getPayload().lambda < MIN_LAMBDA || left.getPayload().isBackoff) 
							&& right.getPayload().lambda < MIN_LAMBDA) 
					{
						++nrPruned;
						node.setLeft(null);
						node.setRight(null);
					}
				}
			}
			
			nrClusters -= nrPruned;
			System.out.printf("Pruned %d nodes, left %d\n", nrPruned, nrClusters);
		}
		*/
	}
	
	/**
	 * Prunes the history tree down to at most lm.getMaxClusters()*2 clusters,
	 * scoring each cluster by its total training count over all data splits.
	 */
	public void pruneNodes() {
		// forest.getMaxClusters() denotes the number of leaves, the number of clusters is double of that (minus one but who's counting)
		if (nrClusters <= lm.getMaxClusters() * 2) return;

		// Sum each cluster's counts over all data splits, in parallel.
		final double[] clusterScores = new double[maxClusterId];

		JobManager manager = JobManager.getInstance();
		JobGroup group = manager.createJobGroup("get cluster counts");

		for(int clusterIdx = 0; clusterIdx < maxClusterId; ++clusterIdx) {
			if (nodes[clusterIdx] == null) continue;

			final int cid = clusterIdx;
			Runnable task = new Runnable() {
				@Override
				public void run() {
					long total = 0;
					for(byte dataId = 0; dataId < nrDataFiles; ++dataId) {
						CountDistribution dist = distStorage.getDistribution(cid, dataId);
						if (dist == null) continue;
						total += dist.getTotalCount();
					}
					// each job writes a distinct index, so no locking is required
					clusterScores[cid] = total;
				}
			};
			manager.addJob(group, new Job(task, ""));
		}
		group.join();

		prune(clusterScores);
	}
	
	/**
	 * Repeatedly removes the preterminal (both children are leaves) node with
	 * the smallest score, turning it into a leaf, until enough leaves are cut.
	 *
	 * @param clusterScores per-clusterid pruning scores (total training counts)
	 */
	private void prune(final double[] clusterScores) {
		// NOTE(review): this mixes units — nrClusters counts all tree nodes while
		// each pruning step removes two nodes but only one leaf; confirm the
		// budget arithmetic is as intended.
		int leavesToCut = nrClusters - lm.getMaxClusters() * 2;

		BinaryTreeIterator<HistoryTreePayload> iterator;
		// order preterminals by ascending score so the cheapest is pruned first
		Comparator<BinaryTree<HistoryTreePayload>> cmp = new Comparator<BinaryTree<HistoryTreePayload>>() {
			
			private double getScore(BinaryTree<HistoryTreePayload> item) {
				//return Math.min(item.getLeft().getPayload().lambda, item.getRight().getPayload().lambda);
				return clusterScores[item.getPayload().clusterid];
			}
			public int compare(BinaryTree<HistoryTreePayload> item1, BinaryTree<HistoryTreePayload> item2) {
				double score1 = getScore(item1);
				double score2 = getScore(item2);
				return score1 < score2 ? -1 : (score1 > score2 ? 1 : 0);
			}
		};
		
		final PriorityQueue<BinaryTree<HistoryTreePayload>> preterminals = 
			new PriorityQueue<BinaryTree<HistoryTreePayload>>(nrClusters/2 + 1, cmp);

		// seed the queue with all current preterminals
		iterator = theTree.getPostOrderIterator();
		while(iterator.hasNext()) {
			BinaryTree<HistoryTreePayload> node = iterator.nextNode();
			if (!node.isLeaf() && node.getLeft().isLeaf() && node.getRight().isLeaf()) {
				preterminals.add(node);
			}
		}
		
		while (leavesToCut > 0) {
			// prune a couple of leaves
			BinaryTree<HistoryTreePayload> node = preterminals.remove();
			BinaryTree<HistoryTreePayload> left = node.getLeft();
			BinaryTree<HistoryTreePayload> right = node.getRight();

			// update parent's lambda
			double leftCount = left == null ? 0 : clusterScores[left.getPayload().clusterid];
			double rightCount = right == null ? 0 : clusterScores[right.getPayload().clusterid];
			double parentCount = clusterScores[node.getPayload().clusterid];

			System.out.printf("pruning node [%d]: count=%g, left=%g, right=%g\n", node.getPayload().clusterid,
					parentCount, leftCount, rightCount);
			
			// detach the children: node becomes a leaf itself
			node.setLeft(null);
			node.setRight(null);
			node.getPayload().question = null;
			
			// NOTE(review): parent is null if node is the root — this would NPE;
			// presumably the root is never a preterminal in practice. Confirm.
			BinaryTree<HistoryTreePayload> parent = node.getParent();
			if (parent.getRight().isLeaf() && parent.getLeft().isLeaf()) {
				preterminals.add(parent);
				int parentId = parent.getPayload().clusterid;
				System.out.printf("new preterminal [%d], count = %g\n", parentId, clusterScores[parentId]);
			}
			--leavesToCut;
		}
		// NOTE(review): the budget above is lm.getMaxClusters() * 2, but here
		// nrClusters is set to lm.getMaxClusters() — confirm this is intended.
		nrClusters = lm.getMaxClusters();
		
	}
	
	/**
	 * LRU cache mapping clusterid -> merged training distribution, loading
	 * entries on demand via makeDistribution().
	 */
	private class DistributionCache extends LRUCache<Integer, Long2DoubleMap> {
		// data splits whose counts are merged into each cached distribution
		private byte dataIds[];
		private int pruningThreshold;

		public DistributionCache(byte dataIds[], int pruningThreshold) {
			// cache sized at 2000 entries (both LRUCache constructor arguments)
			super(2000, 2000);
			this.dataIds = dataIds;
			this.pruningThreshold = pruningThreshold;
		}
		/* (non-Javadoc)
		 * @see edu.umd.clip.lm.util.LRUCache#loadItem(java.lang.Object)
		 */
		@Override
		protected Long2DoubleMap loadItem(Integer clusterid) {
			return makeDistribution(clusterid, pruningThreshold, dataIds);
		}
	}
	
	/**
	 * One cross-validation fold: estimates interpolation lambdas via EM using
	 * the training splits, with dev split(s) providing the n-gram distributions
	 * being smoothed and a heldout split used for entropy reporting.
	 */
	private class SmoothingRun {
		// Current interpolation weights, indexed by clusterid.
		private double[] lambdas;
		// Optional groups of clusterids whose lambdas are tied (averaged); null when unused.
		private int[][] buckets;

		private byte[] trainingDataIds;
		private byte[] devDataIds;
		private byte heldoutDataId;
		// Per-clusterid total training count and distinct-event count.
		private long clusterCounts[];
		private int clusterSizes[];
		private int pruningThreshold;
		
		// LRU cache of merged training distributions, keyed by clusterid.
		private DistributionCache distCache;
		
		private static final String DEV_DIST_DB_NAME = "dev-dist";
		Database db;
		TreeNgramDistribution treeNgramDist;
		
		/**
		 * @param trainingDataIds splits whose counts form the training distributions
		 * @param devDataIds splits used to build the dev n-gram distributions
		 * @param heldoutDataId split reserved for entropy reporting
		 * @param pruningThreshold count threshold passed to the distribution cache
		 */
		public SmoothingRun(byte[] trainingDataIds, byte[] devDataIds, byte heldoutDataId, int pruningThreshold) {
			this.trainingDataIds = trainingDataIds;
			this.heldoutDataId = heldoutDataId;
			// debug:
			//this.trainingDataIds = Arrays.copyOf(trainingDataIds, 1);
			this.devDataIds = devDataIds;
			this.pruningThreshold = pruningThreshold;
			
			this.distCache = new DistributionCache(this.trainingDataIds, this.pruningThreshold);
		}
		
		/**
		 * Builds the dev-data n-gram distributions: concatenates all dev splits
		 * into one context-reduced file, then initializes a TreeNgramDistribution
		 * backed by the DEV_DIST_DB_NAME database (truncated first if present).
		 *
		 * @param dummySmoother forwarded to TreeNgramDistribution.initialize (debug)
		 * @param discountScale discounting scale for the dev distributions
		 * @throws IOException on data file failure
		 * @throws DatabaseException on BDB failure
		 */
		private void generateDevDistributions(boolean dummySmoother, double discountScale) throws IOException, DatabaseException {
			Experiment exp = Experiment.getInstance();

			Experiment.Files files = exp.getFiles();
			
			File devDataFile =  new File(dataDir, "dev-data");

			long overtMask = Experiment.getInstance().getTupleDescription().getOvertFactorsMask();
			TrainingDataFilter filter = new MaskedFuturesTrainingDataFilter(overtMask);
			
			// gather all files of all dev splits
			ArrayList<String> devFiles = new ArrayList<String>();
			for(byte dId : devDataIds) {
				for(String f : files.getSmoothDataFiles(dId)) {
					devFiles.add(f);
				}
			}
			
			TrainingDataUtil.combineAndReduceContext(devFiles.toArray(new String[devFiles.size()]), 
					devDataFile.getAbsolutePath(), 
					lm.getOvertOrder(), lm.getHiddenOrder(), 
					theTree.getPayload().clusterid, filter);

			DatabaseConfig dbConf = new DatabaseConfig();
			dbConf.setReadOnly(false);
			dbConf.setAllowCreate(true);
			dbConf.setTransactional(false);
			dbConf.setDeferredWrite(true);

			// best effort: truncate presumably fails only when the database does
			// not exist yet (first run) — TODO confirm, hence the empty catch
			try {
				env.truncateDatabase(null, DEV_DIST_DB_NAME, false);
			} catch(DatabaseException e) {}
			
			db = env.openDatabase(null, DEV_DIST_DB_NAME, dbConf);
			treeNgramDist = new TreeNgramDistribution(lm, db, dataDir);
			treeNgramDist.initialize(devDataFile, dummySmoother, discountScale);
		}
		
		/**
		 * Prepares the fold: kicks off dev-distribution generation in a job,
		 * initializes the lambdas (from the tree or near-0.5 random), and gathers
		 * per-cluster training counts and sizes in parallel.
		 *
		 * @param useLambdas start from the tree's current lambdas instead of random
		 * @param dummySmoother forwarded to generateDevDistributions (debug)
		 * @param discountScale forwarded to generateDevDistributions
		 */
		public void initialize(boolean useLambdas, final boolean dummySmoother, final double discountScale) {
			JobManager manager = JobManager.getInstance();
			JobGroup group = manager.createJobGroup("cluster counts");
			
			{
				// dev distributions are built concurrently with the count jobs below
				Runnable run = new Runnable() {

					@Override
					public void run() {
						try {
							generateDevDistributions(dummySmoother, discountScale);
						} catch (IOException e) {
							// log only: a failure here leaves db/treeNgramDist unset
							e.printStackTrace();
						} catch (DatabaseException e) {
							e.printStackTrace();
						}
					}
				};
				
				manager.addJob(group, new Job(run, "dev distributions"));
			}
			
			lambdas = new double[maxClusterId];
			if (useLambdas) {
				for(BinaryTreeIterator<HistoryTreePayload> i = theTree.getPreOrderIterator(); i.hasNext(); ) {
					HistoryTreePayload payload = i.next();
					lambdas[payload.clusterid] = payload.lambda;
				}
			} else {
				// random initialization in [0.45, 0.55)
				for(int i=0; i<maxClusterId; ++i) {
					lambdas[i] = 0.5 + (Math.random() - 0.5) * 0.1;
				}
			}
			clusterCounts = new long[maxClusterId];
			clusterSizes = new int[maxClusterId];

			// report root-cluster counts for each training split
			for(byte dataId : trainingDataIds) {
				CountDistribution dataDist = distStorage.getDistribution(theTree.getPayload().clusterid, dataId);
				if (dataDist == null) {
					System.out.printf("data #%d root cluster is null\n", dataId);
				} else {
					System.out.printf("data #%d root cluster count %d\n", dataId, dataDist.getTotalCount());
				}
			}
			
			// and for each dev split
			for(byte dataId : devDataIds) {
				CountDistribution dataDist = distStorage.getDistribution(theTree.getPayload().clusterid, dataId);
				if (dataDist == null) {
					System.out.printf("data #%d root cluster is null\n", dataId);
				} else {
					System.out.printf("data #%d root cluster count %d\n", dataId, dataDist.getTotalCount());
				}
			}
			
			//final float avgClusterCounts[] = new float[nrClusters];
			// one job per cluster: sum training counts and merge count maps
			// (each job writes only its own index, so no locking is needed)
			for(int clusterid=0; clusterid<maxClusterId; ++clusterid) {
				final int cid = clusterid;
				if (nodes[clusterid] == null) continue;
				
				Runnable run = new Runnable() {
					@Override
					public void run() {
						Long2IntMap map = null;
						long totalCount = 0;
						for(byte dataId : trainingDataIds) {
							CountDistribution dataDist = distStorage.getDistribution(cid, dataId);
							if (dataDist != null) {
								//System.err.printf("dist clusterid %d, dataid %d count = %d\n", cid, dataId, dataDist.totalCount);
								totalCount += dataDist.getTotalCount();
								if (trainingDataIds.length == 1) {
									// single split: no need to merge, use the map directly
									map = dataDist.getCounts();
								} else {
									// clone before merging so the stored distribution is not mutated
									if (map == null) {
										map = (Long2IntMap) dataDist.getCounts().clone();
									} else {
										map.addMap(dataDist.getCounts());
									}
								}
							} else {
								//System.err.printf("null dist clusterid %d, dataid %d\n", cid, dataId);
							}
						}
						clusterCounts[cid] = totalCount;
						if (map != null) {
							clusterSizes[cid] = map.size();
						}
						// an approximation
						//avgClusterCounts[cid] = (float) totalCount / nr_items ;
					}
					
				};
				manager.addJob(group, new Job(run, "cluster #" + Integer.toString(cid)));
			}
			group.join();
			
			// sanity report: leaf counts should account for most of the root count
			long totalTrainingCount = 0;
			for(BinaryTreeIterator<HistoryTreePayload> it = theTree.getLeafIterator(); it.hasNext();) {
				totalTrainingCount += clusterCounts[it.next().clusterid];
			}
			long rootCount = clusterCounts[theTree.getPayload().clusterid];
			System.out.printf("Total training counts: leaves %d, root %d\n", 
					totalTrainingCount, rootCount);
		}
		
		/**
		 * Groups clusters with similar (small) training counts into buckets whose
		 * lambdas will be tied: clusters are sorted by ascending count and packed
		 * until each bucket accumulates more than bucketSize counts.  The result
		 * is stored in this.buckets.
		 *
		 * @param bucketSize minimum accumulated training count per bucket
		 */
		@SuppressWarnings("unchecked")
		private void createBuckets(int bucketSize) {
			Pair<Double,Integer> counts[] = new Pair[maxClusterId];

			// collect the clusters that actually have training data
			int numClusters = 0;
			for(int clusterid = 0; clusterid < maxClusterId; ++clusterid) {
				long count = clusterCounts[clusterid];
				if (count > 0) {
					counts[numClusters++] = new Pair<Double,Integer>((double)count, clusterid);
				}
			}
			System.out.println("Number of non-zero clusters: " + numClusters);
			
			// sort ascending by count so low-count clusters are bucketed together
			Comparator<Pair<Double,Integer>> cmp = new Comparator<Pair<Double,Integer>>() {
				@Override
				public int compare(Pair<Double, Integer> arg0,
						Pair<Double, Integer> arg1) {
					return (int)Math.signum(arg0.getFirst() - arg1.getFirst());
				}
			};
			
			Arrays.sort(counts, 0, numClusters, cmp);
			
			ArrayList<int[]> buckets = new ArrayList<int[]>();
			// NOTE(review): pos starts at 1, so the single smallest-count cluster
			// is never placed in any bucket — confirm this is intentional.
			int pos = 1;
			// long: per-cluster counts are longs and their sum can overflow int
			long accumulatedCounts = 0;
			ArrayList<Integer> currentBucket = new ArrayList<Integer>();
			while(pos < numClusters) {
				int clusterid = counts[pos++].getSecond(); 
				currentBucket.add(clusterid);
				
				accumulatedCounts += clusterCounts[clusterid];
				
				// flush when the bucket is big enough or we consumed the last
				// cluster (was `pos == counts.length`, which compared against the
				// array capacity rather than the number of valid entries and
				// silently dropped the final partial bucket)
				if (accumulatedCounts > bucketSize || pos == numClusters) {
					int clusters[] = new int[currentBucket.size()];
					int i = 0;
					for(int cluster : currentBucket) {
						clusters[i++] = cluster;
					}
					buckets.add(clusters);
					
					accumulatedCounts = 0;
					currentBucket.clear();
				}
			}
			int bucketid = 0;
			
			this.buckets = new int[buckets.size()][];
			for(int[] bucket : buckets) {
				this.buckets[bucketid++] = bucket;
			}
			System.out.printf("%d buckets out of %d clusters\n", this.buckets.length, nrClusters);
		}
		
		/**
		 * Runs up to maxIter EM iterations, reporting heldout entropy after each
		 * step and optionally dumping lambdas to files.
		 *
		 * @param maxIter maximum number of EM iterations
		 * @param bucketSize if > 1, tie lambdas of low-count clusters in buckets
		 */
		public void run(int maxIter, int bucketSize) {
			if (bucketSize > 1) {
				createBuckets(bucketSize);
			}

			for(int iter = 0; iter < maxIter; ++iter) {
				long stepStart = System.currentTimeMillis();
				double[] updated = EMstep();
				long stepEnd = System.currentTimeMillis();

				// entropy is measured before the new lambdas are installed below
				double entropy = computeEntropy(heldoutDataId);
				System.out.printf("Iteration %d/%d took %g seconds, entropy=%g (PPL = %g)\n", 
						iter+1, maxIter, 0.001 * (stepEnd - stepStart), entropy, Math.pow(10, entropy));

				if (dumpLambdasPrefix != null) {
					dumpLambdas(String.format("%s-%02d", dumpLambdasPrefix, iter+1));
					dumpLeafLambdas(String.format("%s-leaf-%02d", dumpLambdasPrefix, iter+1));
				}
				lambdas = updated;
			}
		}
		
		/**
		 * Writes all lambdas (indexed by clusterid) to the given file, one per
		 * line.  Errors are logged rather than propagated.
		 *
		 * @param fname output file name
		 */
		private void dumpLambdas(String fname) {
			try {
				PrintStream ps = new PrintStream(new FileOutputStream(fname));
				try {
					for(double lambda : lambdas) {
						ps.println(lambda);
					}
				} finally {
					// close in finally so the stream is not leaked if a write throws
					ps.close();
				}
			} catch (FileNotFoundException e) {
				e.printStackTrace();
			}
		}
		
		/**
		 * Writes the lambdas of the tree's leaves to the given file, one per
		 * line, in leaf-iterator order.  Errors are logged rather than propagated.
		 *
		 * @param fname output file name
		 */
		private void dumpLeafLambdas(String fname) {
			try {
				PrintStream ps = new PrintStream(new FileOutputStream(fname));
				try {
					for(BinaryTreeIterator<HistoryTreePayload> it = theTree.getLeafIterator(); it.hasNext(); ) {
						HistoryTreePayload payload = it.next();
						ps.println(lambdas[payload.clusterid]);
					}
				} finally {
					// close in finally so the stream is not leaked if a write throws
					ps.close();
				}
			} catch (FileNotFoundException e) {
				e.printStackTrace();
			}
		}
		
		/**
		 * One EM iteration: for each non-backoff leaf, distributes the dev data's
		 * probability mass along the leaf-to-root path between each node's own
		 * distribution (accumulated in pr1) and the backoff past that node
		 * (accumulated in pr2), then re-estimates each lambda as
		 * pr1 / (pr1 + pr2), clipped to [MIN_LAMBDA*0.1, maxLambda(cluster)].
		 *
		 * @return the re-estimated lambdas, indexed by clusterid
		 */
		private double[] EMstep() {
			// pr1[c]: expected mass explained by cluster c's own distribution;
			// pr2[c]: expected mass explained by backing off past cluster c.
			final double[] pr1 = new double[lambdas.length];
			final double[] pr2 = new double[lambdas.length];
			Arrays.fill(pr1, 0.0);
			Arrays.fill(pr2, 0.0);
			
			JobManager manager = JobManager.getInstance();
			JobGroup group = manager.createJobGroup("EM step");
			
			// guards the merge of per-leaf partial sums into pr1/pr2
			final Lock prLock = new ReentrantLock();
			
			// final fallback distribution at the end of every backoff chain
			final double uniformProb = 1.0 / allOvertFactors.size();
			
			// one job per non-backoff leaf
			for(BinaryTreeIterator<HistoryTreePayload> iter = theTree.getLeafIterator(); iter.hasNext(); ) {
				final BinaryTree<HistoryTreePayload> leaf = iter.nextNode();
				if (leaf.getPayload().isBackoff) continue;
				
				final int id = leaf.getPayload().clusterid;
				
				Runnable run = new Runnable() {
					public void run() {
						Distribution devDist = treeNgramDist.computeDistribution(leaf); 
						if (devDist == null) {
							return;
						}
						long startTime = System.currentTimeMillis();
						
						Long2DoubleMap devData = devDist.getDistribution();
						
						// load all needed distributions into a local hash
						HashMap<Integer, Long2DoubleMap> distributions = new HashMap<Integer,Long2DoubleMap>(100);

						{
							BinaryTree<HistoryTreePayload> node = leaf;
							while(node != null) {
								Integer i = node.getPayload().clusterid;
								distributions.put(i, distCache.getItem(i));
								node = node.getParent();
							}
						}
						// per-leaf partial sums, merged into pr1/pr2 under prLock below
						Long2DoubleMap pr1hash = new Long2DoubleMap(50);
						Long2DoubleMap pr2hash = new Long2DoubleMap(50);
						
						/*
						 * OLD VERSION
						 * 
						 *
						double alpha = 1.0;
						BinaryTree<HistoryTreePayload> node = leaf;
						while(node != null) {
							HistoryTreePayload payload = node.getPayload();
							int cid = payload.clusterid;
							//pr1hash.put(cid, new MutableDouble(0));
							//pr2hash.put(cid, new MutableDouble(0));
							
							Long2DoubleMap.Iterator iterator = devData.iterator();
							
							for(; iterator.hasNext();) {
								iterator.next();
								
								double count = iterator.getValue() * devDist.totalCount;

								if (count == 0) continue;
								
								long tupleBits = iterator.getKey();
								
								double parentBeta = 0;
								double leafBeta = 0;
								{
									boolean reachedParent = false;
									BinaryTree<HistoryTreePayload> n = leaf;
									double currLeafScale = 1.0;
									double currParentScale = 1.0;
									while(n != null) {
										int clusterid = n.getPayload().clusterid;
										double lambda = lambdas[clusterid];
										double prob = 0;
										Long2DoubleMap probs = distributions.get(clusterid);
										if (probs != null) {
											prob = probs.get(tupleBits) * lambda;
										}

										if (n == node.getParent()) {
											reachedParent = true;
										}
										
										if (reachedParent) {
											parentBeta += prob * currParentScale;
											currParentScale *= 1 - lambda;
										}
										leafBeta += prob * currLeafScale;

										currLeafScale *= 1 - lambda;
										n = n.getParent();
									}

									leafBeta += currLeafScale * uniformProb;
									parentBeta += currParentScale * uniformProb;
								}
								
								
								double prob = distributions.get(cid).get(tupleBits);
								
								double pr1add = alpha * lambdas[cid] * prob / leafBeta;
								
								double pr2add = alpha * (1 - lambdas[cid]) * parentBeta / leafBeta;
								
								pr1hash.addAndGet(cid, count * pr1add);
								pr2hash.addAndGet(cid, count * pr2add);
							}
							
							alpha *= 1.0 - lambdas[cid];
							node = node.getParent();
						}
						*/
						
						/*
						 * NEW OPTIMIZED VERSION
						 */ 
						LongAndDoublePair[] devProbs = new LongAndDoublePair[devData.size()];
						{
							int pos = 0;
							for(Long2DoubleMap.Iterator iterator = devData.iterator(); iterator.hasNext(); ) {
								Long2DoubleMap.Entry e = iterator.next();
								devProbs[pos++] = new LongAndDoublePair(e.getKey(), e.getValue());
							}
							// order words by descending probability
							Arrays.sort(devProbs, LongAndDoublePair.doubleDescComparator);
						}
						
						double accumulatedProb = 0;
						
						
						for(int pos = 0; pos < devProbs.length; ++pos) {
							// only the head of the distribution (99.5% of the mass) is used
							if (accumulatedProb > 0.995) {
								// skip the tail
								//System.err.printf("skipping %d out of %d words, accumulated prob = %g\n", 
								//		devProbs.length-pos, devProbs.length, accumulatedProb);
								break;
							}
							long tupleBits = devProbs[pos].theLong;
							
							double devProb = devProbs[pos].theDouble;
							accumulatedProb += devProb;
							
							// leafBeta: interpolated probability of tupleBits at the leaf;
							// parentBetas[i]: interpolated probability starting at the i-th
							// node on the leaf-to-root path (last entry = uniform fallback)
							double leafBeta = 0;
							double parentBetas[];
							
							{
								ArrayList<Double> betaProbs = new ArrayList<Double>(50);
								ArrayList<Double> nodeLambdas = new ArrayList<Double>(50);
								BinaryTree<HistoryTreePayload> n = leaf;
								double currLeafScale = 1.0;
								// walk leaf -> root, collecting probs and lambdas
								while(n != null) {
									int clusterid = n.getPayload().clusterid;
									double lambda = lambdas[clusterid];
									double prob = 0;
									Long2DoubleMap probs = distributions.get(clusterid);
									if (probs != null) {
										prob = probs.get(tupleBits);
									}
									betaProbs.add(prob);
									nodeLambdas.add(lambda);

									leafBeta += prob * lambda * currLeafScale;

									currLeafScale *= 1 - lambda;
									n = n.getParent();
								}
								//betaProbs.add(uniformProb);
								//nodeLambdas.add(1.0);
								
								// fold backwards: beta_i = lambda_i*p_i + (1-lambda_i)*beta_{i+1}
								parentBetas = new double[betaProbs.size()+1];
								parentBetas[parentBetas.length-1] = uniformProb;
								
								for(int i=betaProbs.size()-1; i>=0; --i) {
									double lambda = nodeLambdas.get(i);
									parentBetas[i] = lambda * betaProbs.get(i) + (1-lambda) * parentBetas[i+1];
								}
								
								leafBeta += currLeafScale * uniformProb;
							}
							
							if (leafBeta == 0) {
								System.err.printf("leafBeta in zero for %s in cluster #%d\n", 
										FactorTuple.toStringNoNull(tupleBits), leaf.getPayload().clusterid);
								continue;
							}
							// accumulate expected counts along the leaf-to-root path;
							// alpha is the probability of backing off down to this node
							BinaryTree<HistoryTreePayload> node = leaf;
							double alpha = 1.0;
							int nodeIndex = 0;
							while(node != null) {
								double parentBeta = parentBetas[nodeIndex++];
								
								HistoryTreePayload payload = node.getPayload();
								int cid = payload.clusterid;
								
								double prob = distributions.get(cid).get(tupleBits);
								
								double pr1add = alpha * lambdas[cid] * prob / leafBeta;
								
								double pr2add = alpha * (1 - lambdas[cid]) * parentBeta / leafBeta;
								
								pr1hash.addAndGet(cid, devDist.getTotalCount() * devProb * pr1add);
								pr2hash.addAndGet(cid, devDist.getTotalCount() * devProb * pr2add);

								alpha *= 1.0 - lambdas[cid];
								node = node.getParent();
							}
						}
						
						
						// merge this leaf's partial sums into the shared accumulators
						prLock.lock();
						for(Long2DoubleMap.Iterator i = pr1hash.iterator(); i.hasNext();) {
							Long2DoubleMap.Entry entry = i.next();
							pr1[(int)entry.getKey()] += entry.getValue();
						}
						for(Long2DoubleMap.Iterator i = pr2hash.iterator(); i.hasNext();) {
							Long2DoubleMap.Entry entry = i.next();
							pr2[(int)entry.getKey()] += entry.getValue();
						}
						prLock.unlock();
						
						long endTime = System.currentTimeMillis();
						
						//System.err.printf("Leaf #%d took %g seconds\n", id, 0.001 * (endTime - startTime));
					}
				};
				Job job = new Job(run, group, "cluster #" + Integer.toString(id));
				manager.addJob(job);
			}
			group.join();

			/*
			if (buckets != null) {
				for(int[] bucket : buckets) {
					if (bucket.length == 1) continue;
					
					double pr1sum = 0.0;
					double pr2sum = 0.0;
					for(int clusterid : bucket) {
						pr1sum += pr1[clusterid];
						pr2sum += pr2[clusterid];
					}
					pr1sum /= bucket.length;
					pr2sum /= bucket.length;
					for(int clusterid : bucket) {
						pr1[clusterid] = pr1sum;
						pr2[clusterid] = pr2sum;
					}
				}
			}
			*/
			
			// M-step: lambda = pr1 / (pr1 + pr2), clipped to [minLambda, maxLambda]
			double[] newLambdas = new double[lambdas.length];
			final double minLambda = MIN_LAMBDA * 0.1;
			for(int i=0; i<lambdas.length; ++i) {
				newLambdas[i] = (pr1[i] + pr2[i] == 0.0) ? 0.0 : pr1[i] / (pr1[i] + pr2[i]);
				if (Double.isNaN(newLambdas[i])) {
					System.err.printf("lambda for cluster #%d is NaN\n", i);
					newLambdas[i] = minLambda;
				} else {
					double maxLambda = maxLambda(i);
					if (newLambdas[i] > maxLambda) {
						newLambdas[i] = maxLambda;
					} else if (newLambdas[i] < minLambda) {
						newLambdas[i] = minLambda;
					}
				}
			}
			
			
			// tie the lambdas within each bucket by averaging
			if (buckets != null) {
				for(int[] bucket : buckets) {
					if (bucket.length == 1) continue;
					
					double sum = 0.0;
					for(int clusterid : bucket) {
						sum += newLambdas[clusterid];
					}
					sum /= bucket.length;
					for(int clusterid : bucket) {
						newLambdas[clusterid] = sum;
					}
				}
			}
			
			return newLambdas;
		}
		
		/**
		 * Upper bound on a cluster's interpolation weight: {@code 1 - 1/count},
		 * which reserves one count's worth of probability mass for backing off.
		 * A cluster with no observed counts is capped at 0.
		 */
		private double maxLambda(int clusterid) {
			final long observed = clusterCounts[clusterid];
			if (observed == 0) {
				return 0;
			}
			return 1.0 - 1.0 / observed;
		}

		/** Releases cached state and closes the underlying database handle. */
		public void clean() {
			// Drop references so the large arrays/caches become collectible.
			distCache = null;
			buckets = null;
			clusterCounts = null;
			// Best-effort close: a failure here is reported but not propagated.
			try {
				db.close();
			} catch (DatabaseException e) {
				e.printStackTrace();
			}
		}
		
		/**
		 * Computes the per-word entropy (negative average log10-likelihood) of
		 * held-out data file {@code dataid}, scoring each non-backoff leaf's
		 * counts against the distribution produced by {@code treeNgramDist} for
		 * that leaf. One job is scheduled per leaf; partial sums are folded into
		 * shared accumulators under a lock.
		 *
		 * @param dataid index of the held-out data file to score
		 * @return total negative log10-likelihood divided by total word count
		 *         (NaN if no leaf contributed any counts)
		 */
		public double computeDevEntropy(byte dataid) {
			final MutableDouble totalLL = new MutableDouble();
			final MutableDouble totalCount = new MutableDouble();
			final Lock lock = new ReentrantLock();
			
			JobManager manager = JobManager.getInstance();
			JobGroup group = manager.createJobGroup("entropy");
			
			for(BinaryTreeIterator<HistoryTreePayload> leafIt = theTree.getLeafIterator(); leafIt.hasNext(); ) {
				final BinaryTree<HistoryTreePayload> leaf = leafIt.nextNode();
				if (leaf.getPayload().isBackoff) continue;
				
				final int clusterid = leaf.getPayload().clusterid;
				final CountDistribution dist = distStorage.getDistribution(clusterid, dataid);
				// No held-out counts for this leaf: nothing to score.
				if (dist == null) continue;
				
				Runnable run = new Runnable() {

					@Override
					public void run() {
						
						// Re-fetch the node by cluster id (intentionally shadows the
						// captured 'leaf' variable above).
						BinaryTree<HistoryTreePayload> leaf = nodes[clusterid];
						Distribution devDist = treeNgramDist.computeDistribution(leaf);
						
						// Sum count * log10(prob) over every word seen in the held-out data.
						double LL = 0;
						for(Long2IntMap.Iterator it = dist.getCounts().iterator(); it.hasNext();) {
							Long2IntMap.Entry entry = it.next();
							long word = entry.getKey();
							double prob = devDist.getDistribution().get(word);
							double addition = entry.getValue() * Math.log10(prob);
							if (!Double.isInfinite(addition)) {
								LL += addition;
							} else {
								// A zero probability would contribute -Infinity;
								// skip the word but report it.
								System.err.printf("skipping %s dev prob = %g, count = %d\n", 
										FactorTuple.toStringNoNull(word), prob, entry.getValue());
							}
						}
						// Fold this leaf's contribution into the shared totals.
						lock.lock();
						totalLL.add(LL);
						totalCount.add(dist.getTotalCount());
						lock.unlock();
					}
				};
				Job job = new Job(run, "entropy");
				manager.addJob(group, job);
			}			
			group.join();
			// Entropy = -LL / N (log10 units).
			return - totalLL.doubleValue() / totalCount.doubleValue();
		}
		
		/**
		 * Computes the per-word entropy (negative average log10-likelihood) of
		 * data file {@code dataid} under the current interpolated model: each
		 * word's probability is the lambda-weighted mixture of the distributions
		 * along the path from the word's leaf up to the root. One job is
		 * scheduled per non-backoff leaf; partial sums are folded into shared
		 * accumulators under a lock.
		 *
		 * @param dataid index of the data file to score
		 * @return total negative log10-likelihood divided by total word count
		 *         (NaN if no leaf contributed any counts)
		 */
		public double computeEntropy(byte dataid) {
			final MutableDouble totalLL = new MutableDouble();
			final MutableDouble totalCount = new MutableDouble();
			final Lock lock = new ReentrantLock();
			
			JobManager manager = JobManager.getInstance();
			JobGroup group = manager.createJobGroup("entropy");
			
			for(BinaryTreeIterator<HistoryTreePayload> leafIt = theTree.getLeafIterator(); leafIt.hasNext(); ) {
				final BinaryTree<HistoryTreePayload> leaf = leafIt.nextNode();
				if (leaf.getPayload().isBackoff) continue;
				
				final int clusterid = leaf.getPayload().clusterid;
				final CountDistribution dist = distStorage.getDistribution(clusterid, dataid);
				// No counts for this leaf in the requested data file.
				if (dist == null) continue;
				
				Runnable run = new Runnable() {

					@Override
					public void run() {
						
						// Interpolated probability per word, accumulated over the path.
						Long2DoubleMap theProbs = new Long2DoubleMap(dist.getCounts().size());
						BinaryTree<HistoryTreePayload> leaf = nodes[clusterid];
						BinaryTree<HistoryTreePayload> node = leaf;
						
						// First pass root-ward: the mixture weight of the k-th ancestor
						// is lambda_k * prod_{j<k} (1 - lambda_j).
						ArrayList<MutableDouble> scales = new ArrayList<MutableDouble>(50);
						{						
							double currScale = 1;
							while(node != null) {
								int cid = node.getPayload().clusterid;
								double lambda = lambdas[cid];
								scales.add(new MutableDouble(lambda * currScale));
								currScale *= 1 - lambda;
								node = node.getParent();
							}
						}
						
						// Second pass: add each ancestor's scaled distribution for every
						// word that occurs in this leaf's counts.
						node = leaf;
						int pos = 0;
						while(node != null) {
							int cid = node.getPayload().clusterid;
							double scale = scales.get(pos++).doubleValue();
							Long2DoubleMap nodeDist = distCache.getItem(cid);
							for(Long2IntMap.Iterator it = dist.getCounts().iterator(); it.hasNext();) {
								long word = it.next().getKey();
								theProbs.addAndGet(word, nodeDist.get(word) * scale);
							}
							node = node.getParent();
						}
						
						// Sum count * log10(prob) over the leaf's words.
						double LL = 0;
						for(Long2IntMap.Iterator it = dist.getCounts().iterator(); it.hasNext();) {
							Long2IntMap.Entry entry = it.next();
							long word = entry.getKey();
							double prob = theProbs.get(word);
							double addition = entry.getValue() * Math.log10(prob);
							if (!Double.isInfinite(addition)) {
								LL += addition;
							} else {
								// Zero probability would contribute -Infinity; skip and report.
								System.err.printf("prob of %s is %g, count = %d\n", FactorTuple.toStringNoNull(word), prob, entry.getValue());
							}
						}
						// Fold this leaf's contribution into the shared totals.
						lock.lock();
						totalLL.add(LL);
						totalCount.add(dist.getTotalCount());
						lock.unlock();
					}
					
				};
				
				Job job = new Job(run, "entropy");
				manager.addJob(group, job);
			}
			group.join();
			// Entropy = -LL / N (log10 units).
			return - totalLL.doubleValue() / totalCount.doubleValue();
		}
	}
	
	/**
	 * Builds a normalized word distribution for one cluster by pooling the raw
	 * counts from the given data files, optionally pruning low-count words
	 * (count <= pruningThreshold) before normalizing.
	 *
	 * @param clusterid        cluster whose counts are pooled
	 * @param pruningThreshold prune words at or below this count (0 disables pruning)
	 * @param dataIds          data files to pool counts from
	 * @return word -> probability map (unnormalized/empty if no mass survives)
	 */
	private Long2DoubleMap makeDistribution(int clusterid, int pruningThreshold, byte[] dataIds) {
		
		Long2DoubleMap pooled = new Long2DoubleMap();
		long mass = 0;
		// Accumulate counts across all requested data files.
		for(byte dataId : dataIds) {
			CountDistribution counts = distStorage.getDistribution(clusterid, dataId);
			if (counts == null) continue;
			mass += counts.getTotalCount();
			for(Long2IntMap.Iterator it = counts.getCounts().iterator(); it.hasNext();) {
				Long2IntMap.Entry e = it.next();
				pooled.addAndGet(e.getKey(), e.getValue());
			}
		}
		
		if (pruningThreshold > 0) {
			// Remove rare words, keeping the total mass consistent with what remains.
			for(Long2DoubleMap.Iterator it = pooled.iterator(); it.hasNext(); ) {
				Long2DoubleMap.Entry e = it.next();
				long count = Math.round(e.getValue());
				if (count <= pruningThreshold) {
					it.remove();
					mass -= count;
				}
			}
		}

		// Normalize counts into probabilities (skipped when nothing survived).
		if (mass > 0) {
			double scale = 1.0 / mass;
			for(Long2DoubleMap.Iterator it = pooled.iterator(); it.hasNext(); ) {
				Long2DoubleMap.Entry e = it.next();
				e.setValue(e.getValue() * scale);
			}
		}
		
		return pooled;
	}
	
	/**
	 * Builds the on-disk name for a smoothing-data file:
	 * {@code layer-<level, zero-padded to 3>-data-<dataId>}.
	 *
	 * @param level  tree layer number (zero-padded to three digits)
	 * @param dataId data file index
	 * @return the file name, e.g. {@code layer-002-data-7}
	 */
	public static String makeDataFilename(int level, int dataId) {
		final String layerPart = String.format("layer-%03d", level);
		// Plain decimal rendering of dataId matches the original %d conversion.
		return layerPart + "-data-" + dataId;
	}
}
