/**
 * 
 */
package edu.umd.clip.lm.model.decoding;

import java.util.*;
import java.util.concurrent.atomic.AtomicLong;

import edu.umd.clip.lm.model.*;
import edu.umd.clip.lm.storage.AbstractProbTreeStorage;
import edu.umd.clip.lm.storage.AsyncConsumer;
import edu.umd.clip.lm.util.LRUCache;
import edu.umd.clip.lm.util.Timing;
import edu.umd.clip.lm.util.tree.BinaryTree;
import edu.umd.clip.lm.util.tree.BinaryTreeIterator;

/**
 * @author Denis Filimonov <den@cs.umd.edu>
 *
 */
public class InterpolatingProbTreeStorage extends AbstractProbTreeStorage {
	
	/**
	 * Cache payload: an interpolated probability tree plus the bookkeeping
	 * values accumulated along the history-tree path that produced it.
	 */
	private static class InterpolatedProbTreeValue {
		public final OnDiskCompactProbTree probTree; // may be null when no tree exists at this level
		public double backoff;      // accumulated backoff weight for this node
		public double totalScale;   // total lambda mass contributed so far
		public boolean lastIsNull;  // true if storage had no tree at this node itself
		
		private InterpolatedProbTreeValue(OnDiskCompactProbTree tree, double backoffWeight) {
			this(tree, backoffWeight, 0, false);
		}
		
		private InterpolatedProbTreeValue(OnDiskCompactProbTree tree, double backoffWeight, double scale) {
			this(tree, backoffWeight, scale, false);
		}
		
		private InterpolatedProbTreeValue(OnDiskCompactProbTree tree, double backoffWeight, double scale, boolean nodeTreeMissing) {
			this.probTree = tree;
			this.backoff = backoffWeight;
			this.totalScale = scale;
			this.lastIsNull = nodeTreeMissing;
		}
	}
	
	// Cumulative nanoseconds spent fetching trees from the backend storage
	// (only updated when Timing.TIMING is on; reported via getStats()).
	static AtomicLong totalStorageTime = new AtomicLong();
	// Cumulative nanoseconds spent merging/scaling probability trees.
	static AtomicLong totalTreeMergingTime = new AtomicLong();
	int debug = 1; // verbosity level; loadItem() prints diagnostics when debug > 2
	
	/**
	 * LRU cache whose miss handler builds the interpolated probability tree
	 * for a key by recursively interpolating with the parent node's tree.
	 *
	 * Fixes two crashes in the debug>2 diagnostics of the original:
	 * an NPE on a null result tree, and reads from an always-empty list.
	 */
	private class InterpolatedProbTreeCache extends LRUCache<Key, InterpolatedProbTreeValue> {
		public InterpolatedProbTreeCache(int hardCapacity, int softCapacity) {
			super(hardCapacity, softCapacity);
		}
	
		/**
		 * Computes the interpolated tree for {@code key}:
		 * result = lambda * tree(node) + (1-lambda) * interpolated(parent),
		 * except at a "top" node, whose tree is returned without interpolation.
		 * Either side may be null, in which case the other side is scaled and
		 * passed through; the returned probTree may itself be null.
		 */
		@Override
		protected InterpolatedProbTreeValue loadItem(Key key) {
			int clusterid = key.getClusterid();
			int modelId = key.getModelid();
			LanguageModel lm = lms[modelId];
			BinaryTree<HistoryTreePayload> node = lm.getDecodingRuntime().getNode(clusterid);
			double lambda = node.getPayload().lambda;
			if (node.getPayload().top) {
				// Top of the history tree: no parent to interpolate with.
				long start = Timing.nanoTime();
				OnDiskCompactProbTree tree = storage.getProbTree(key);
				long end = Timing.nanoTime();
				if (Timing.TIMING) totalStorageTime.addAndGet(end - start);
				
				double backoffWeight = getBackoff(modelId, node);
				InterpolatedProbTreeValue result = new InterpolatedProbTreeValue(tree, backoffWeight);
				
				if (tree != null) {
					result.totalScale = lambda;
				}
				return result;
			}
			
			// Recursively obtain the parent's interpolated tree (via the cache).
			BinaryTree<HistoryTreePayload> parent = node.getParent();
			Key parentKey = new Key(lm.getIdNum(), parent.getPayload().clusterid, key.getFactorBits());
			InterpolatedProbTreeValue parentPair = getCachedProbTree(parentKey);
			
			OnDiskCompactProbTree parentProbTree = parentPair.probTree; 
			
			OnDiskCompactProbTree probTree = null;
			
			// no need to request a tree if the parent didn't have it
			// TODO: this may collide with some possible optimizations
			if (!parentPair.lastIsNull) {
				long start = Timing.nanoTime();
				probTree = storage.getProbTree(lm.getIdNum(), clusterid, key.getFactorBits());
				long end = Timing.nanoTime();
				if (Timing.TIMING) totalStorageTime.addAndGet(end - start);
			}
			
			double backoffWeight = parentPair.backoff * (1.0 - lambda);
			double totalScale = parentPair.totalScale * (1.0 - lambda);
			InterpolatedProbTreeValue result;
			if (probTree == null || lambda < 1e-20) {
				// This level contributes (almost) nothing: pass the parent's
				// tree down, scaled by (1 - lambda).
				if (parentProbTree == null) {
					result = new InterpolatedProbTreeValue(null, backoffWeight, totalScale, probTree == null);
					if (debug > 2) {
						// BUGFIX: result.probTree is null here — the original
						// dereferenced it and threw an NPE.
						System.out.printf("[%s] skipping interpolation (null parent) lambda=%e, result=null, clusterid=%d\n", 
								lm.getId(), lambda, clusterid);
					}
					return result;
				}
				parentProbTree = (OnDiskCompactProbTree) parentProbTree.clone();
				parentProbTree.scale(1.0 - lambda);
				result = new InterpolatedProbTreeValue(parentProbTree, backoffWeight, totalScale, probTree == null);
				if (debug > 2) {
					System.out.printf("[%s] skipping interpolation with lambda=%e, result=%e, clusterid=%d\n", 
							lm.getId(), lambda, result.probTree.getTotalProb(), clusterid);
				}
				return result;
			}
			
			totalScale += lambda;
			
			if (parentProbTree == null) {
				// Only this level has a tree: scale it by lambda.
				probTree = (OnDiskCompactProbTree) probTree.clone();
				probTree.scale(lambda);
				result = new InterpolatedProbTreeValue(probTree, backoffWeight, totalScale);
				if (debug > 2) {
					System.out.printf("[%s] skipping interpolation (null parent) lambda=%e, result=%e, clusterid=%d\n", 
							lm.getId(), lambda, result.probTree.getTotalProb(), clusterid);
				}
				return result;
			}
			
			// Merge both trees: (1-lambda)*parent + lambda*this.
			double parentTotal = 0;
			double childTotal = 0;
			{
				long start = Timing.nanoTime();
				OnDiskCompactProbTree intTree = (OnDiskCompactProbTree) parentProbTree.clone();
				intTree.scale(1.0 - lambda);
				probTree = (OnDiskCompactProbTree) probTree.clone();
				probTree.scale(lambda);
				if (debug > 2) {
					// Capture the scaled operands before merge() consumes them.
					parentTotal = intTree.getTotalProb();
					childTotal = probTree.getTotalProb();
				}
				
				intTree = OnDiskCompactProbTree.merge(intTree, probTree);
				
				intTree.normalize(); // TODO: check if this is necessary
				
				result = new InterpolatedProbTreeValue(intTree, backoffWeight, totalScale);
				long end = Timing.nanoTime();
				if (Timing.TIMING) {
					totalTreeMergingTime.addAndGet(end - start);
				}
			}
			if (debug > 2) {
				// BUGFIX: the original read from an always-empty 'trees' list,
				// throwing IndexOutOfBoundsException whenever debug > 2.
				System.out.printf("[%s] interpolated %e and %e, lambda=%e, result = %e, clusterid=%d, backoff = %e\n", 
						lm.getId(), parentTotal, childTotal, 
						lambda, result.probTree.getTotalProb(), clusterid, backoffWeight);
			}
			return result;
		}
	}

	private final LanguageModel lms[]; // indexed by LanguageModel.getIdNum(); populated via addLm()
	private final AbstractProbTreeStorage storage; // backend storage the interpolated trees are built from
	private final InterpolatedProbTreeCache interpolatedProbTreeCache[]; // hash-striped caches (see getCacheNum)
	private final int numCaches = 16; // must be a power of 2
	private int hardCacheSize = 400000; // total hard capacity across all stripes
	private int softCacheSize = 100000; // total soft capacity across all stripes
	//private BinaryTree<HistoryTreePayload> nodes[];
	//private double backoffWeights[];

	/**
	 * Creates an interpolating view over {@code storage}, with the LM slot
	 * array sized from the current Experiment and the total cache capacity
	 * divided evenly across the stripes.
	 *
	 * @param storage the backend storage to fetch raw probability trees from
	 */
	public InterpolatingProbTreeStorage(AbstractProbTreeStorage storage) {
		this.lms = new LanguageModel[Experiment.getInstance().getNumLMs()];
		this.storage = storage;

		interpolatedProbTreeCache = new InterpolatedProbTreeCache[numCaches];
		for (int stripe = 0; stripe < numCaches; ++stripe) {
			interpolatedProbTreeCache[stripe] =
					new InterpolatedProbTreeCache(hardCacheSize / numCaches, softCacheSize / numCaches);
		}
	}

	/** Registers {@code lm} under its numeric id so it can serve interpolation requests. */
	public void addLm(LanguageModel lm) {
		int slot = lm.getIdNum();
		lms[slot] = lm;
	}
	
	/**
	 * Maps a key to a cache stripe by spreading its hash code and masking.
	 * Relies on {@code numCaches} being a power of 2.
	 */
	private int getCacheNum(Key key) {
		int h = key.hashCode();
		// Fold higher bits down so the low mask bits depend on the whole hash.
		h ^= (h >>> 21) ^ (h >>> 13);
		h ^= (h >>> 8) ^ (h >>> 5);
		return h & (numCaches - 1);
	}

	/**
	 * Replaces every cache stripe with a fresh one whose hard and soft limits
	 * are both {@code cacheSize/numCaches}. Note: discards all cached trees.
	 */
	public void setCacheSize(int cacheSize) {
		final int perStripe = cacheSize / numCaches;
		for (int stripe = 0; stripe < numCaches; ++stripe) {
			interpolatedProbTreeCache[stripe] = new InterpolatedProbTreeCache(perStripe, perStripe);
		}
	}
	

	/** Returns (loading on a miss) the interpolated tree value for {@code key} from its stripe. */
	private InterpolatedProbTreeValue getCachedProbTree(Key key) {
		return interpolatedProbTreeCache[getCacheNum(key)].getItem(key);
	}

	/** Looks up the backoff weight of {@code node}'s cluster in model {@code modelId}. */
	private double getBackoff(int modelId, BinaryTree<HistoryTreePayload> node) {
		return lms[modelId].getDecodingRuntime().getBackoff(node.getPayload().clusterid);
	}
	
	/* (non-Javadoc)
	 * @see edu.umd.clip.lm.storage.AbstractProbTreeStorage#getProbTree(edu.umd.clip.lm.storage.AbstractProbTreeStorage.Key)
	 */
	@Override
	public OnDiskCompactProbTree getProbTree(Key key) {
		// May be null when neither this node nor any ancestor had a tree.
		return getCachedProbTree(key).probTree;
	}

	/* (non-Javadoc)
	 * @see edu.umd.clip.lm.storage.AbstractProbTreeStorage#getStats()
	 */
	/**
	 * Summarizes cache hit/delay rates and accumulated timing across all
	 * stripes, followed by the backend storage's own stats.
	 */
	@Override
	public String getStats() {
		final double nano = 1e-9; // nanoseconds -> seconds
		long totalHits = 0;
		long totalRequests = 0;
		long totalDelayed = 0;
		for (InterpolatedProbTreeCache cache : interpolatedProbTreeCache) {
			totalHits += cache.getHits();
			totalRequests += cache.getRequests();
			totalDelayed += cache.getDelayed();
		}
		// BUGFIX: avoid division by zero — before any request the original
		// printed "NaN% hits, NaN% delayed".
		long denom = Math.max(totalRequests, 1);
		String stats = String.format("InterpolatedProbTreeCache: %.3f%% hits, %.3f%% delayed of %d\n" +
				"backend storage time: %.3fs, merge time %.3fs\n%s", 
				(double) totalHits * 100 / denom, (double) totalDelayed * 100 / denom, 
				totalRequests,
				nano * totalStorageTime.get(), nano * totalTreeMergingTime.get(),
				storage.getStats());
		return stats;
	}

	/* (non-Javadoc)
	 * @see edu.umd.clip.lm.storage.AbstractProbTreeStorage#requestProbTree(edu.umd.clip.lm.storage.AbstractProbTreeStorage.Key)
	 */
	@Override
	protected void requestProbTree(Key key) {
		// Synchronous "prefetch": loading populates the cache; the returned
		// tree itself is intentionally discarded.
		getProbTree(key);
	}

	/* (non-Javadoc)
	 * @see edu.umd.clip.lm.storage.AbstractProbTreeStorage#setProbTree(edu.umd.clip.lm.storage.AbstractProbTreeStorage.Key, edu.umd.clip.lm.model.ProbTree)
	 */
	@Override
	public void setProbTree(Key key, OnDiskCompactProbTree probTree) {
		// This storage is a read-only interpolating view over the backend;
		// writing trees through it is not supported.
		throw new UnsupportedOperationException();
	}

	/* (non-Javadoc)
	 * @see edu.umd.clip.lm.storage.AsyncProducer#request(java.lang.Object, edu.umd.clip.lm.storage.AsyncConsumer)
	 */
	/**
	 * Synchronous implementation of the async producer interface: the tree is
	 * resolved immediately and the callback runs on the caller's thread.
	 */
	@Override
	public void request(Key key, AsyncConsumer<Key, OnDiskCompactProbTree> callback) {
		callback.receivedData(key, getProbTree(key));
	}

	/* (non-Javadoc)
	 * @see edu.umd.clip.lm.storage.AbstractProbTreeStorage#closeAll()
	 */
	@Override
	public void closeAll() {
		// Only the backend holds closeable resources; the in-memory caches
		// need no explicit cleanup.
		storage.closeAll();
	}

}
