/**
 * 
 */
package edu.umd.clip.lm.model.decoding;

import java.io.*;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.*;

import com.meetup.memcached.*;

import edu.berkeley.nlp.util.Pair;
import edu.umd.clip.jobs.JobManager;
import edu.umd.clip.lm.factors.Dictionary;
import edu.umd.clip.lm.factors.*;
import edu.umd.clip.lm.model.*;
import edu.umd.clip.lm.util.*;

/**
 * @author Denis Filimonov <den@cs.umd.edu>
 *
 */
/**
 * Computes n-gram probabilities with the latent (non-overt) factors marginalized out,
 * by decoding the n-gram through a sequence of {@link ForestModel}s of increasing order
 * and taking the ratio of the n-word and (n-1)-word partition totals.
 *
 * Three caching layers are involved:
 * <ul>
 *   <li>a local LRU cache of final n-gram log-probabilities ({@code ngramCache});</li>
 *   <li>an optional shared decoding trellis ({@code head}) that lets n-grams with common
 *       prefixes reuse partial decodings, periodically swapped to bound memory;</li>
 *   <li>an optional memcached server shared across processes.</li>
 * </ul>
 * All probabilities are in log10 space.
 *
 * @author Denis Filimonov <den@cs.umd.edu>
 */
public class NgramMarginalization {
	// Forest models ordered by n-gram order: forests[0] is the unigram model, forests[1] bigram, etc.
	private ForestModel forests[];
	// Index of the surface-word factor within a factor tuple.
	private final byte wordFactorIdx;
	// Bit mask selecting the overt (observed) factors of a tuple; the remaining factors are marginalized.
	private final long overtMask;
	// Memoizes final n-gram log-probabilities, keyed by the word sequence.
	private final LRUCache<NgramCacheKey,Double> ngramCache;
	// Root of the shared decoding trellis; null when trellis caching is disabled (trellisCacheSize <= 0).
	private TreeTrellisNode head;
	// Number of n-gram cache misses between trellis-cache swaps (bounds trellis memory growth).
	private final int swapCacheAfterNgrams = 10000;
	private final int trellisCacheSize;
	// Countdown to the next trellis swap; decremented on every ngramCache miss.
	private final AtomicInteger ngramsLeft = new AtomicInteger(swapCacheAfterNgrams);
	// Shared cross-process cache; null when no memcached address was configured.
	private final MemcachedClient memcached;
	// Sink for dumping computed n-grams; null unless setDumpNgramsFile() has been called.
	private BufferedWriter dumpNgrams;
	// When true, <s>-initial n-grams are left-padded with <s> up to the highest-order forest's order.
	private boolean maxForestStarts = false;
	
	/**
	 * Creates a marginalizer with default cache sizes (10000 cached n-grams,
	 * 1000 trellis nodes) and no memcached backing.
	 *
	 * @param forest the highest-order forest model
	 */
	public NgramMarginalization(ForestModel forest) {
		this(forest, 10000, 1000, null);
	}
	
	/**
	 * @param forest the highest-order forest model; the full order sequence is obtained
	 *        from the {@link Experiment} singleton
	 * @param ngramCacheSize capacity of the local n-gram probability cache
	 * @param trellisCacheSize capacity of the trellis-node cache; values &lt;= 0 disable
	 *        trellis sharing entirely
	 * @param memcachedAddr address of a shared memcached server, or null for none
	 */
	public NgramMarginalization(ForestModel forest, int ngramCacheSize, final int trellisCacheSize, String memcachedAddr) {
		this.trellisCacheSize = trellisCacheSize;
		
		Experiment exp = Experiment.getInstance();
		this.forests = exp.getForestSequence(forest);
		createNewCache();
		
		wordFactorIdx = exp.getTupleDescription().getMainFactorIndex();
		overtMask = exp.getTupleDescription().getOvertFactorsMask();
		
		ngramCache = new LRUCache<NgramCacheKey,Double>(ngramCacheSize,ngramCacheSize*10) {
			@Override
			protected Double loadItem(NgramCacheKey key) {
				
				// Periodically discard the trellis to keep its memory bounded; the
				// n-gram cache itself survives the swap.
				if (head != null && ngramsLeft.decrementAndGet() == 0) {
					System.err.printf("Swapping cache: %s\nNgram cache stats: %s\n", head.getCache().getStats(), ngramCache.getCacheStats());
					createNewCache();
					// NOTE(review): the reset uses trellisCacheSize * 10 rather than
					// swapCacheAfterNgrams (the initial countdown); with the default
					// sizes both equal 10000 — confirm whether that is intentional.
					ngramsLeft.set(trellisCacheSize * 10);
				}
				
				final long ngram[] = key.words;
				if (memcached != null) {
					// Consult the shared cache first; compute and publish on a miss.
					String memcachedKey = ngram2memcachedKey(ngram);
					Double prob = (Double) memcached.get(memcachedKey);
					if (prob == null) {
						prob = newComputeNgramProbability(ngram);
						memcached.set(memcachedKey, prob);
					}
					return prob;
				}
				double prob = newComputeNgramProbability(ngram);
				return prob;
			}
		};

		if (memcachedAddr == null) {
			this.memcached = null;
		} else {
			
			// One connection per worker so concurrent lookups don't contend for sockets.
			SockIOPool sockPool = SockIOPool.getInstance();
			sockPool.setServers(new String[] {memcachedAddr});
			sockPool.setInitConn(JobManager.getInstance().getNumWorkers());
			sockPool.initialize();
			
			this.memcached = new MemcachedClient();
			this.memcached.setPrimitiveAsString(false);
			// Keys produced by ngram2memcachedKey() are already safe; skip sanitization.
			this.memcached.setSanitizeKeys(false);
			this.memcached.setCompressEnable(false);
			//System.err.print(getStats());
		}
	}
	
	/** Replaces the shared trellis with a fresh one (no-op when trellis caching is disabled). */
	private void createNewCache() {
		if (trellisCacheSize > 0) {
			TrellisNodeCache nodeCache = new TrellisNodeCache(trellisCacheSize);
			head = new TreeTrellisNode(nodeCache);
		}
	}
	
	/**
	 * Returns the (cached) marginalized log10 probability of the n-gram,
	 * computing it on a cache miss.
	 *
	 * @param ngram factor tuples in order w_{i-n+1}..w_i
	 */
	public double getNgramProbability(long[] ngram) {
		Double prob = ngramCache.getItem(new NgramCacheKey(ngram));
		return prob;
	}
	
	/** Appends a bracketed, space-separated rendering of the n-gram to {@code sb} (debug aid). */
	private static void printNgram(long ngram[], StringBuilder sb) {
		sb.append('[');
		if (ngram.length > 0) {
			sb.append(FactorTuple.toStringNoNull(ngram[0]));
		}
		for(int i=1; i<ngram.length; ++i) {
			sb.append(' ');
			sb.append(FactorTuple.toStringNoNull(ngram[i]));
		}
		sb.append(']');
	}
	
	/**
	 * Encodes an n-gram as a compact, memcached-safe key: the overt factors of each
	 * word are packed into an int and rendered in base {@link Character#MAX_RADIX},
	 * joined with '-'.
	 */
	public final static String ngram2memcachedKey(long [] ngram) {
		FactorTupleDescription desc = Experiment.getInstance().getTupleDescription();
		StringBuilder sb = new StringBuilder(ngram.length * 8);
		if (ngram.length > 0) {
			int compactWord = desc.packOvertFactorsToInt(ngram[0]);
			sb.append(Integer.toString(compactWord, Character.MAX_RADIX));
		}
		for(int i=1; i<ngram.length; ++i) {
			sb.append('-');
			int compactWord = desc.packOvertFactorsToInt(ngram[i]);
			sb.append(Integer.toString(compactWord, Character.MAX_RADIX));			
		}
		return sb.toString();
	}
	
	// computes probability of an ngram with tags marginalized
	// the ngram is in order w_{i-n+1}...w_{i-1} w_{i}
	// and lms are in the order unigram, bigram, etc
	/**
	 * Trellis-based computation of p(w_i | w_{i-n+1}..w_{i-1}) with latent factors
	 * marginalized: decodes the n-gram through the shared trellis and returns
	 * log10 P(w_{i-n+1}..w_i) - log10 P(w_{i-n+1}..w_{i-1}).
	 * Thread-safe via per-node locking on the trellis.
	 */
	public double newComputeNgramProbability(long[] ngram) {
		byte addedStarts=0;
		
		// find the LAST <s>, to handle crazy strings like "<s> the <s> key"
		for(int pos = ngram.length-1; pos >= 0; --pos) {
    		if (Dictionary.isStart(FactorTuple.getValue(ngram[pos], wordFactorIdx))) {
    			if (pos == ngram.length-1) {
    				// can happen when the ngram ends with <s>
    				// return very small probability
    				return -100.0;    				
    			}
    			// Keep the <s> itself; it contributes one "added start" position.
    			ngram = Arrays.copyOfRange(ngram, pos, ngram.length);
    			addedStarts = 1;
    			ForestModel maxForest = forests[forests.length-1];
    			if (maxForestStarts && ngram.length < maxForest.getOrder()) {
    				// Left-pad with <s> so the highest-order forest is used from the start.
    				long newNgram[] = new long[maxForest.getOrder()];
    				Arrays.fill(newNgram, 0, newNgram.length - ngram.length, ngram[0]);
    				System.arraycopy(ngram, 0, newNgram, newNgram.length - ngram.length, ngram.length);
    				addedStarts += newNgram.length - ngram.length;
    				ngram = newNgram;
    			}
    			break;
    		}			
		}
		
		// Without a shared trellis, decode into a throwaway root.
		TreeTrellisNode currentNode = head == null ? new TreeTrellisNode() : head;
		byte forestOffset = addedStarts;
		
		if (addedStarts > 0) {
			currentNode = Decoder.addStarts(currentNode, addedStarts);
		}
		
		for(byte i=addedStarts; i<ngram.length; ++i,++forestOffset) {
			// Forest order grows with position, capped at the highest available order.
			ForestModel forest = forests[Math.min(i, forests.length-1)];
			// Strip latent factors: only overt factors identify the trellis edge.
			long word = ngram[i] & overtMask;
			
			currentNode = currentNode.appendNodeAndLock(word);
			TreeTrellisNode prevNode = currentNode.getPreviousNode();
			try {
				if (!currentNode.isDecoded()) {
					// Lock the history (order-deep) so no concurrent swap/eviction
					// invalidates it while we decode this slice.
					prevNode.lock(forest.getOrder());
					try {
						DecodingContext context = new DecodingContext(currentNode, forest);
						
						forest.getDecoder().partition(context);
						
						currentNode.finishDecoding();
					} finally {
						prevNode.unlock(forest.getOrder());
					}
				}
	
			} finally {
				currentNode.unlock();
			}
		}
		TreeTrellisNode prevNode = currentNode.getPreviousNode();

		// log10 of the partition totals at n and n-1 words (scale factored out separately).
		double prob = currentNode.getCumulativeLogScale() + Math.log10(currentNode.getTotalProb());
		double prevProb = prevNode.getCumulativeLogScale() + Math.log10(prevNode.getTotalProb());
		
		currentNode.releasePath();
		
		double theProb = prob - prevProb;
		if (dumpNgrams != null) {
			dumpNgram(ngram, theProb);
		}
		return theProb;
	}

	// computes probability of an ngram with tags marginalized
	// the ngram is in order w_{i-n+1}...w_{i-1} w_{i}
	// and lms are in the order unigram, bigram, etc
	/**
	 * Legacy non-trellis computation of the marginalized n-gram log10 probability.
	 * Builds a fresh {@link SentenceContext} per call (no state shared across calls).
	 * NOTE(review): unlike {@link #newComputeNgramProbability}, this variant drops the
	 * final <s> from the history rather than keeping it — confirm before unifying.
	 */
	public double computeNgramProbability(long[] ngram) {
		int order = Math.min(ngram.length, forests.length);
		
		SentenceContext ctx = forests[order-1].getDecoder().createContext();
		double n_1_prob = 0;
		
		byte addedStarts=0;
		
		// find the LAST <s>, to handle crazy strings like "<s> the <s> key"
		for(int pos = ngram.length-1; pos >= 0; --pos) {
    		if (Dictionary.isStart(FactorTuple.getValue(ngram[pos], wordFactorIdx))) {
    			ngram = Arrays.copyOfRange(ngram, pos+1, ngram.length);
    			addedStarts = (byte) (pos+1);
    			break;
    		}			
		}

		/*
		// find the first non-<s> word
    	for(; addedStarts<ngram.length; ++addedStarts) {
    		if (!Dictionary.isStart(FactorTuple.getValue(ngram[addedStarts], wordFactorIdx))) {
    			if (addedStarts > 0) {
    				ngram = Arrays.copyOfRange(ngram, addedStarts, ngram.length);
    			}
    			break;
    		}
    	}
		*/
		
		if (ngram.length == 0) {
			// can happen when the ngram ends with <s>
			// return very small probability
			return -100.0;
		}
		//int addedStarts = lms.length - ngram.length;
		
		for(int i=0; i<ngram.length; ++i) {
			ctx = ctx.addWord(ngram[i] & overtMask);
			forests[Math.min(i+addedStarts, forests.length-1)].getDecoder().partition(ctx);
			ctx.setLastSliceTotalProb(ctx.getCurrentSlice().getTotalProb());
			// Remember the log-prob of the (n-1)-word prefix for the final ratio.
			if (i == ngram.length-2) {
				n_1_prob = ctx.getLogProb();
			}
		}
		double n_prob = ctx.getLogProb();
		return n_prob - n_1_prob;
	}
	
	/**
	 * Writes "w1 w2 ... wn prob\n" to the dump file. Caller must ensure
	 * {@code dumpNgrams} is non-null; I/O failures are logged, not propagated.
	 */
	public void dumpNgram(long[] ngram, double prob) {
		StringBuilder sb = new StringBuilder(128);
		for(long word : ngram) {
			sb.append(FactorTuple.toStringNoNull(word));
			sb.append(' ');
		}
		sb.append(prob);
		sb.append('\n');
		try {
			dumpNgrams.write(sb.toString());
		} catch (IOException e) {
			e.printStackTrace();
		}
	}
	
	/** Flushes the n-gram dump writer, if one is open; I/O failures are logged, not propagated. */
	public void flushNgramDump() {
		if (dumpNgrams != null) {
			try {
				dumpNgrams.flush();
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
	}
	
	// Matches "<words> <prob>" or just "<prob>"; group(1) = words (may be absent), group(2) = prob.
	final static Pattern dumpedNgramRe = Pattern.compile("^(?:(.*\\S)\\s+|)(\\S+)$");
	/**
	 * Parses one line produced by {@link #dumpNgram} back into (ngram, prob).
	 *
	 * @return the parsed pair, or null if the line does not match the dump format
	 */
	public static Pair<long[], Double> readDumpedNgram(InputParser parser, String line) {
		line =  line.trim();
		Matcher matcher = dumpedNgramRe.matcher(line);
		if (matcher.matches()) {
			String ngramStr = matcher.group(1);
			// group(1) is null — not "" — when the empty alternative matched (a line
			// holding only a probability), so guard before isEmpty() to avoid an NPE.
			long ngram[] = (ngramStr == null || ngramStr.isEmpty()) ? new long[0] : parser.parseSentence(ngramStr);
			Double prob = Double.valueOf(matcher.group(2));
			return new Pair<long[], Double>(ngram, prob);
		}
		System.err.printf("Failed to read dumped ngram: '%s'\n", line);
		return null;
	}
	
	/**
	 * Starts (or redirects) dumping of computed n-grams to {@code fname}.
	 * On first call also schedules a daemon timer that flushes the writer every 10s.
	 * NOTE(review): when redirecting, the previous writer is flushed but deliberately
	 * not closed (see commented-out close) — presumably other threads may still hold
	 * it; confirm before changing.
	 */
	public synchronized void setDumpNgramsFile(String fname) {
		try {
			if (this.dumpNgrams != null) {
				this.dumpNgrams.flush();
				//this.dumpNgrams.close();
				this.dumpNgrams = IO.getWriter(IO.getOutputStream(fname));
			} else {
				this.dumpNgrams = IO.getWriter(IO.getOutputStream(fname));
				
				// Daemon timer so the periodic flush never blocks JVM shutdown.
				Timer timer = new Timer(true);
				TimerTask task = new TimerTask() {
					public void run() {
						if (dumpNgrams != null) {
							try {
								dumpNgrams.flush();
							} catch (IOException e) {
								e.printStackTrace();
							}
						}
					}
				};
				timer.scheduleAtFixedRate(task, 0, 10000);
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
	}
	
	/** Renders local n-gram cache stats and, if configured, per-server memcached stats. */
	public String getStats() {
		StringBuilder sb = new StringBuilder();
		sb.append("Local ngram cache stats: ");
		sb.append(ngramCache.getCacheStats());
		sb.append('\n');
		
		if (memcached != null) {
			// server name -> key -> value
			@SuppressWarnings("unchecked")
			Map<String,Map<String,String>> memcachedStats = memcached.stats();
			for(Map.Entry<String,Map<String,String>> e1 : memcachedStats.entrySet()) {
				String servername = e1.getKey();
				sb.append("Memcached stats for ");
				sb.append(servername);
				sb.append(":\n");
				for(Map.Entry<String, String> e2 : e1.getValue().entrySet()) {
					String key = e2.getKey();
					String value = e2.getValue();
					sb.append("  ");
					sb.append(key);
					sb.append('=');
					sb.append(value);
					sb.append('\n');
				}
			}
		}
		return sb.toString();
	}
	
	/**
	 * Hash-map key wrapping an n-gram's word array with value equality/hashing.
	 * The array must not be mutated after the key is inserted into the cache.
	 */
	static private class NgramCacheKey {
		private long words[];

		public NgramCacheKey(long[] words) {
			this.words = words;
		}

		@Override
		public int hashCode() {
			return Arrays.hashCode(words);
		}

		@Override
		public boolean equals(Object obj) {
			if (this == obj)
				return true;
			if (obj == null)
				return false;
			if (!(obj instanceof NgramCacheKey))
				return false;
			NgramCacheKey other = (NgramCacheKey) obj;
			if (!Arrays.equals(words, other.words))
				return false;
			return true;
		}
	}

	/** @return whether <s>-initial n-grams are padded to the highest-order forest. */
	public final boolean isMaxForestStarts() {
		return maxForestStarts;
	}

	/** Enables/disables <s>-padding of <s>-initial n-grams (see {@link #newComputeNgramProbability}). */
	public final void setMaxForestStarts(boolean maxForestStarts) {
		this.maxForestStarts = maxForestStarts;
	}

}
