/**
 * 
 */
package edu.umd.clip.lm.ngram;

import java.io.IOException;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicLong;

import edu.umd.clip.jobs.*;
import edu.umd.clip.lm.factors.*;
import edu.umd.clip.lm.model.*;
import edu.umd.clip.lm.model.data.Context;
import edu.umd.clip.lm.model.data.ContextFuturesPair;
import edu.umd.clip.lm.model.data.TrainingDataBlock;
import edu.umd.clip.lm.model.data.TupleCountPair;
import edu.umd.clip.lm.model.data.WritableTrainingData;
import edu.umd.clip.lm.util.LRU;
import edu.umd.clip.lm.util.Long2IntMap;

/**
 * @author Denis Filimonov <den@cs.umd.edu>
 *
 */
public class TrainingDataSequenceSampler {
	private final NgramModel model1;
	private final NgramModel model2;
	/** n-gram order; each training event pairs (order-1) context tuples with one future tuple. */
	private final int order;
	
	/** Dumps every generated sequence to stderr when true. */
	private final boolean debug = false;

	/**
	 * Creates a sampler that draws tuples from the two factored models:
	 * model1 is sampled first, then model2 is sampled conditioned on the
	 * partially-filled tuple that model1 produced.
	 *
	 * @param model1 model whose future variable is sampled first
	 * @param model2 model whose future variable is sampled second
	 */
	public TrainingDataSequenceSampler(NgramModel model1, NgramModel model2) {
		this.model1 = model1;
		this.model2 = model2;
		
		Experiment exp = Experiment.getInstance();
		this.order = exp.getLM().getOrder();
	}
	
	/**
	 * Samples roughly {@code sampleSize} (context, future) events from the
	 * models and writes them, aggregated into per-context future counts,
	 * to {@code data}.
	 *
	 * Worker jobs generate whole sequences and enqueue them; a dedicated
	 * writer thread drains the queue, accumulates counts in a bounded LRU
	 * cache, and flushes evicted entries to {@code data} in blocks. After
	 * the writer thread finishes, the remaining cache is flushed as well.
	 * The target may be slightly overshot because workers generate whole
	 * sequences at a time.
	 *
	 * @param sampleSize target number of events to sample
	 * @param data sink for the aggregated training data
	 * @throws IOException if writing a block to {@code data} fails
	 */
	public void doSampling(final long sampleSize, final WritableTrainingData data) throws IOException {
		JobManager manager = JobManager.getInstance();
		JobGroup group = manager.createJobGroup("sampling");
		final AtomicLong samplesLeft = new AtomicLong(sampleSize);
		final ConcurrentLinkedQueue<long[]> sequences = new ConcurrentLinkedQueue<long[]>();
		
		// one-element array so the anonymous classes below can rebind the current block
		final TrainingDataBlock block[] = new TrainingDataBlock[1];
		block[0] = new TrainingDataBlock();
		
		@SuppressWarnings("serial")
		final LRU<Context, Long2IntMap> tmpData = 
			new LRU<Context, Long2IntMap>(1000000) {
				@Override
				protected boolean removeEldestEntry(
						Entry<Context, Long2IntMap> eldest) {
					// when the LRU decides to evict, flush the evicted counts to the output block
					if (super.removeEldestEntry(eldest)) {
						// TODO: use an intermediate queue
						Long2IntMap counts = eldest.getValue();
						
						ContextFuturesPair pair = new ContextFuturesPair(eldest.getKey(), TupleCountPair.fromMap(counts));
						if (!block[0].add(pair)) {
							// current block is full: write it out and start a new one
							try {
								data.add(block[0]);
								block[0] = new TrainingDataBlock();
								block[0].add(pair);
							} catch (IOException e) {
								// cannot propagate a checked exception out of removeEldestEntry;
								// log and keep going (best effort)
								e.printStackTrace();
							}
						}
						return true;
					}
					return false;
				}
		};
		
		Thread dataWriter;
		{
			Runnable run = new Runnable() {
				@Override
				public void run() {
					long remaining = sampleSize;
					// workers may generate more sequences than we need, don't throw them away if they are ready
					while (remaining > 0 || !sequences.isEmpty()) {
						long sequence[] = sequences.poll();
						if (sequence == null) {
							// queue is empty: back off briefly before polling again
							try {
								Thread.sleep(10);
							} catch (InterruptedException e) {
								// restore the interrupt status instead of swallowing it
								Thread.currentThread().interrupt();
							}
							continue;
						}
						
						// every position from (order-1) on yields one (context, future) event
						for (int i = order - 1; i < sequence.length; ++i) {
							long ctx[] = Arrays.copyOfRange(sequence, i - order + 1, i);
							long future = sequence[i];
							Context context = new Context(ctx);
							Long2IntMap map = tmpData.get(context);
							if (map == null) {
								map = new Long2IntMap();
								tmpData.put(context, map);
							}
							map.addAndGet(future, 1);
						}
						remaining -= sequence.length - order + 1;
						// publish progress so the job-submission loop below knows when to stop
						samplesLeft.set(remaining);
					}
				}
			};
			dataWriter = new Thread(run, "data writer");
			dataWriter.start();
		}
		
		// keep submitting sequence-generation jobs until enough events have been counted
		while (samplesLeft.get() > 0) {
			Runnable run = new Runnable() {
				@Override
				public void run() {
					if (samplesLeft.get() > 0) {
						long sequence[] = generateSequence();
						sequences.add(sequence);
					}
				}
			};
			
			Job job = new Job(run, "sequence generator");
			manager.addJob(group, job);
			// bound the backlog: block until fewer than (workers+1) jobs are pending
			group.join(manager.getNumWorkers() + 1);
		}
		
		try {
			dataWriter.join();
			// some workers may still be working on new sequences
			// no need to wait for them
		} catch (InterruptedException e) {
			// restore the interrupt status for the caller
			Thread.currentThread().interrupt();
		}
		
		// dump the remaining cache
		for (Map.Entry<Context, Long2IntMap> entry : tmpData.entrySet()) {
			ContextFuturesPair pair = new ContextFuturesPair(entry.getKey(), TupleCountPair.fromMap(entry.getValue()));
			if (!block[0].add(pair)) {
				data.add(block[0]);
				block[0] = new TrainingDataBlock();
				block[0].add(pair);
			}
		}
		if (block[0].hasData()) {
			data.add(block[0]);
		}
	}
	
	/**
	 * Generates one random sequence: (order-1) start tuples followed by
	 * tuples sampled from the models, terminated by (and including) the
	 * end-of-sequence tuple.
	 *
	 * @return the generated sequence, trimmed to its actual length
	 */
	private long[] generateSequence() {
		long sequence[] = new long[1024];
		FactorTupleDescription desc = Experiment.getInstance().getTupleDescription();
		int pos = 0;
		// seed the context window with start-of-sequence tuples
		for (; pos < order - 1; ++pos) {
			sequence[pos] = desc.createStartTuple();
		}
		
		final long end = desc.createEndTuple();
		
		while (true) {
			if (pos == sequence.length) {
				// grow the buffer geometrically
				sequence = Arrays.copyOf(sequence, sequence.length * 2);
			}
			
			long context[] = Arrays.copyOfRange(sequence, pos - order + 1, pos);
			long futureTuple = generateOneWord(context);
			sequence[pos] = futureTuple;
			++pos;
			if (futureTuple == end) break;
		}
		
		sequence = Arrays.copyOf(sequence, pos);
		
		if (debug) {
			StringBuilder sb = new StringBuilder();
			for (long tuple : sequence) {
				sb.append(FactorTuple.toStringNoNull(tuple));
				sb.append(' ');
			}
			sb.setCharAt(sb.length() - 1, '\n');
			System.err.print(sb.toString());
		}
		return sequence;
	}
	
	/**
	 * Samples one complete future tuple for the given context by sampling
	 * model1 first and then model2 conditioned on model1's result.
	 *
	 * @param context the (order-1) preceding tuples
	 * @return the fully assembled future tuple
	 */
	private long generateOneWord(long context[]) {
		long futureTuple = 0;
		futureTuple = sampleFromModel(model1, context, futureTuple);
		futureTuple = sampleFromModel(model2, context, futureTuple);
		return futureTuple;
	}
	
	/**
	 * Sets the model's future variable in {@code futureTuple} by
	 * inverse-CDF sampling over the model's future vocabulary given the
	 * context (and any variables already filled in by earlier models).
	 *
	 * @param model model to sample from
	 * @param context preceding tuples the model's context variables may read
	 * @param futureTuple partially filled future tuple (output of earlier models)
	 * @return {@code futureTuple} with this model's future variable set
	 */
	private long sampleFromModel(NgramModel model, long context[], long futureTuple) {
		// ThreadLocalRandom avoids allocating and re-seeding a Random on every call;
		// this method runs concurrently on worker threads
		Random rnd = ThreadLocalRandom.current();
		
		int ctx[] = new int[model.getContextVariables().length];
		for (int i = 0; i < ctx.length; ++i) {
			CtxVar var = model.getContextVariables()[i];
			ctx[i] = var.getValue(context, futureTuple);
		}

		int vocab[] = model.getFutureVocab(ctx); 

		// build the (unnormalized) cumulative distribution over the future vocabulary
		double cumulativeDistribution[] = new double[vocab.length];

		double sum = 0;
		for (int i = 0; i < vocab.length; ++i) {
			double prob = model.getProb(vocab[i], ctx);
			sum += prob;
			cumulativeDistribution[i] = sum;
		}
		// draw uniformly in [0, sum) and locate the containing bucket
		double prob = rnd.nextDouble() * sum;
		int idx = Arrays.binarySearch(cumulativeDistribution, prob);
		if (idx < 0) idx = -idx - 1; // convert insertion point to bucket index
		if (idx == vocab.length) --idx; // guard against floating-point round-off

		futureTuple = model.getFutureVariable().setValue(futureTuple, vocab[idx]);
		
		return futureTuple;
	}
}
