/**
 * 
 */
package edu.umd.clip.lm.programs;

import edu.berkeley.nlp.util.*;

import java.io.*;
import java.nio.charset.Charset;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.text.*;
import java.lang.management.*;

import com.sleepycat.je.DatabaseException;

import edu.umd.clip.lm.model.*;
import edu.umd.clip.lm.model.decoding.NgramMarginalization;
import edu.umd.clip.jobs.*;
import edu.umd.clip.lm.factors.*;
import edu.umd.clip.lm.util.*;
import edu.umd.clip.lm.nbest.*;
import edu.umd.clip.lm.nbest.NbestFormat.NbestFormatData;


/**
 * @author Denis Filimonov <den@cs.umd.edu>
 *
 */
public class LMRescorer {
	/**
	 * Command-line options for {@link LMRescorer}. Fields are populated
	 * directly by {@code OptionParser} via the {@code @Option} annotations.
	 */
	public static class Options {
		@Option(name = "-config", required = true, usage = "XML config file")
		public String config;

		@Option(name = "-input", required = false, usage = "List of nbest files (Default: stdin)")
		public String input;

		@Option(name = "-output", required = true, usage = "Output directory")
		public String output;

		@Option(name = "-jobs", usage = "number of concurrent jobs (default: 1)")
		public int jobs = 1;

		@Option(name = "-forest", required = true, usage = "the decision tree forest")
		public String forest;

		@Option(name = "-debug", usage = "decoder debug level (default: 0)")
		public int debug = 0;

		@Option(name = "-host", usage = "remote storage hostname or IP address (default: use local storage)")
		public String host = null;

		// Fixed typo in the usage string: "remove" -> "remote". The field
		// default 0 is a "not set" sentinel; main() only copies values > 0,
		// so the effective default (2332) comes from LMDecodingOptions.
		@Option(name = "-port", usage = "remote storage port (default: 2332)")
		public int port = 0;

		@Option(name = "-meminfo", usage = "track memory usage (default: false)")
		public boolean meminfo = false;

		// -1 is a "not set" sentinel; main() only copies values > 0, so the
		// effective default (1e-4) comes from LMDecodingOptions.
		@Option(name = "-coarse-threshold", usage = "coarse threshold, requires the new decoder (default: 1e-4)")
		public double coarse = -1;

		@Option(name = "-use-constraints", usage = "compute conditional sentence probability given the constraints")
		public boolean useConstraints = false;

		@Option(name = "-silence-token", required = false, usage = "silence token (to be removed from the input)")
		public String silenceToken = null;

		// NOTE(review): main() applies this penalty whenever -silence-token is
		// set, even if -silence-penalty was never given (i.e. the -1 default).
		@Option(name = "-silence-penalty", required = false, usage = "silence penalty (to be added to the log10 score for each silence token)")
		public double silencePenalty = -1;

		@Option(name = "-tokenize", required = false, usage = "tokenize the input (default: false)")
		public boolean tokenize = false;

		@Option(name = "-ngram", required = false, usage = "use marginalized ngram probabilities (default: false)")
		public boolean ngram = false;

		@Option(name = "-jerboa", required = false, usage = "use Jerboa storage (default: false)")
		public boolean useJerboa = false;

		@Option(name = "-compact", required = false, usage = "use Compact storage (default: false)")
		public boolean useCompact = false;

		@Option(name = "-bdb", required = false, usage = "use Berkeley DB storage (default: false)")
		public boolean useBDB = false;
	}

	/**
	 * Entry point. Reads a list of n-best file names (one per line, from
	 * {@code -input} or stdin), rescores every hypothesis in each file with
	 * the decision tree forest LM, and writes the rescored file into the
	 * {@code -output} directory under the input file's base name.
	 *
	 * @param args command-line arguments, parsed into {@link Options}
	 * @throws IOException on I/O failure while setting up the decoder
	 * @throws ClassNotFoundException if a serialized model class cannot be loaded
	 * @throws DatabaseException on Berkeley DB storage errors
	 */
	public static void main(String[] args) throws IOException, ClassNotFoundException, DatabaseException {
		final MemoryMXBean memoryBean = ManagementFactory.getMemoryMXBean();
		
		MemoryUsage memuse;
		
        OptionParser optParser = new OptionParser(Options.class);
        final Options opts = (Options) optParser.parse(args, true);

		if (opts.meminfo) {
			memuse = memoryBean.getHeapMemoryUsage();
			System.out.printf("MEMUSE: initial: %dM/%dM\n", memuse.getUsed()/1048576, memuse.getMax()/1048576);
		}

        // Copy only explicitly-set CLI values into the decoding options;
        // sentinel values (null, 0, -1) keep the LMDecodingOptions defaults.
        LMDecodingOptions lmOpts = new LMDecodingOptions();
        lmOpts.config = opts.config;
        lmOpts.debug = opts.debug;
        if (opts.jobs > 0) lmOpts.jobs = opts.jobs;
        if (opts.host != null) lmOpts.host = opts.host;
        if (opts.port > 0) lmOpts.port = opts.port;
        if (opts.coarse > 0) lmOpts.coarseThreshold = opts.coarse;
        if (opts.forest != null) lmOpts.forest = opts.forest;
        
        // Storage backend selection: later assignments win, so if several
        // flags are given the effective priority is COMPACT > BDB > JERBOA > REMOTE.
        if (opts.host != null && opts.port > 0) lmOpts.storage = LMDecodingOptions.Storage.REMOTE;
        if (opts.useJerboa) lmOpts.storage = LMDecodingOptions.Storage.JERBOA;
        if (opts.useBDB) lmOpts.storage = LMDecodingOptions.Storage.BDB;
        if (opts.useCompact) lmOpts.storage = LMDecodingOptions.Storage.COMPACT;
        
		// NOTE(review): this label still says "initial" -- looks like a
		// copy/paste of the block above; probably meant "after option parsing".
		if (opts.meminfo) {
			memuse = memoryBean.getHeapMemoryUsage();
			System.out.printf("MEMUSE: initial: %dM/%dM\n", memuse.getUsed()/1048576, memuse.getMax()/1048576);
		}

        LanguageModel.initDecoding(lmOpts);

        Experiment experiment = Experiment.getInstance();
		
		// With -use-constraints the input is parsed as a factored-LM stream;
		// otherwise as plain text.
		final InputParser parser = opts.useConstraints 
			? new FLMInputParser(experiment.getTupleDescription()) 
			: new PlainInputParser(experiment.getTupleDescription());
		// Sentence boundary tuples: the end tuple is always appended; the
		// start tuple is only prepended in -ngram mode.
		{
			if (opts.ngram) {
				long[] startTuples = new long[1];
				startTuples[0] = experiment.getTupleDescription().createStartTuple();
				parser.setStartTuples(startTuples);
			}
			long[] endTuples = new long[1];
			endTuples[0] = experiment.getTupleDescription().createEndTuple(); 
			parser.setEndTuples(endTuples);
		}
		final ForestModel forest = experiment.getForest(opts.forest);
		
		if (opts.meminfo) {
			memuse = memoryBean.getHeapMemoryUsage();
			System.out.printf("MEMUSE: after loading LMs: %dM/%dM\n", memuse.getUsed()/1048576, memuse.getMax()/1048576);
		}

		// NOTE(review): extraWords is computed here but never read again in
		// this method -- dead code?
		final MutableInteger extraWords = new MutableInteger(0);
		if (parser.getStartTuples() != null) {
			extraWords.set(extraWords.intValue() + parser.getStartTuples().length);
		}
		if (parser.getEndTuples() != null) {
			extraWords.set(extraWords.intValue() + parser.getEndTuples().length);
		}

		// Window size 1 -- presumably re-serializes concurrently rescored
		// items in submission order; TODO confirm DataSequencer contract.
		final DataSequencer<NbestFormatData> sequencer = new DataSequencer<NbestFormatData>(1);
		final PTBTokenizer tokenizer = new PTBTokenizer();
		
		// Matches the silence token only as a whole whitespace-delimited word;
		// the lookbehind/lookahead keep the surrounding whitespace intact.
		final Pattern silence_re;
		if (opts.silenceToken != null) {
			silence_re = Pattern.compile("(?<=\\s|^)"+Pattern.quote(opts.silenceToken)+"(?=\\s|$)");
			//System.err.println("Silence pattern: " + silence_re);
		} else {
			silence_re = null;
		}
		// Only built in -ngram mode; null otherwise (checked via opts.ngram).
		final NgramMarginalization marginalizer;
		
		if (opts.ngram) {
			marginalizer = new NgramMarginalization(forest);
		} else {
			marginalizer = null;
		}
		
		// One unit of work: rescore a single n-best hypothesis and hand the
		// result back to the sequencer.
		class EvalRunnable implements Runnable {
			Pair<Integer, NbestFormatData> pair;
			final boolean tokenize;

			public EvalRunnable(int sentNo, NbestFormatData line) {
				pair = new Pair<Integer, NbestFormatData>(sentNo, line);
				tokenize = opts.tokenize;
			}
			
			public void run() {
				String input = pair.getSecond().getSentence();
				int silenceCount = 0;
				// Count and strip silence tokens; the count feeds the
				// penalty applied after scoring.
				if (silence_re != null) {
					Matcher match = silence_re.matcher(input);
					while(match.find()) {
						++silenceCount;
					}
					// NOTE(review): reset() is redundant -- replaceAll()
					// resets the matcher itself.
					match.reset();
					input = match.replaceAll(" ");
				}
				// Optionally re-tokenize (PTB style) and re-join the words
				// with single spaces.
				if (tokenize) {
					String words[] = tokenizer.tokenize(input);
					StringBuilder sb = new StringBuilder();
					if (words.length > 0) {
						sb.append(words[0]);
						for(int i=1; i<words.length; ++i) {
							sb.append(' ');
							sb.append(words[i]);
						}
						input = sb.toString();
					}
				}
		    	long[] sentence = parser.parseSentence(input);
		    	
		    	double logProb;
		    	// ngram mode: sum per-position marginalized n-gram log
		    	// probabilities; start tuples serve as context only and are
		    	// not scored themselves.
		    	if (opts.ngram) {
		    		byte order = forest.getOrder();
		    		logProb = 0;
		    		int numStartTuples = parser.getStartTuples() == null ? 0 : parser.getStartTuples().length;
		    		
		    		// randomize ngram order for better concurrency
		    		short positions[] = new short[sentence.length-numStartTuples];
		    		for(short i=0; i<positions.length; ++i) {
		    			positions[i] = (short) (i + numStartTuples);
		    		}
		    		RandomUtil.shuffleArray(positions);
		    		
		    		for(short i : positions) {
		    			long[] ngram = Arrays.copyOfRange(sentence, Math.max(0, i-order+1), i+1);
		    			double prob = marginalizer.getNgramProbability(ngram);
		    			logProb += prob;
		    		}
		    	} else {
		    		logProb = forest.getDecoder().evaluateSentence(sentence, opts.useConstraints);
		    	}
		    	//double logProb = forest.getDecoder().evaluateSentence(sentence, opts.useConstraints);
		    	
		    	// Per-token silence penalty (log10 domain per the
		    	// -silence-penalty usage string). NOTE(review): applied even
		    	// when silencePenalty is left at its -1 default.
		    	if (silence_re != null) {
		    		logProb += silenceCount * opts.silencePenalty;
		    	}
		    	// Store the new LM score on the hypothesis and release the
		    	// pair to the sequencer.
		    	pair.getSecond().setLmScore(logProb); 
		    	sequencer.putItem(pair);
				if (opts.meminfo) {
					MemoryUsage memuse = memoryBean.getHeapMemoryUsage();
					System.out.printf("MEMUSE: after evaluating sentence %s: %dM/%dM\n", pair.getFirst().toString(), memuse.getUsed()/1048576, memuse.getMax()/1048576);
					System.out.println("STATS: " + forest.getDecoder().getStatistics());
				}

			}
		}
		
		// In -meminfo mode, additionally dump heap usage every 10 seconds on
		// a daemon timer thread.
		// NOTE(review): the "after experiment.closeXML()" label looks stale --
		// there is no closeXML() call in this method.
		Timer timer = null;
		if (opts.meminfo) {
			memuse = memoryBean.getHeapMemoryUsage();
			System.out.printf("MEMUSE: after experiment.closeXML(): %dM/%dM\n", memuse.getUsed()/1048576, memuse.getMax()/1048576);
			timer = new Timer(true);
			TimerTask task = new TimerTask() {
				public void run() {
					MemoryUsage memuse = memoryBean.getHeapMemoryUsage();
					DateFormat df = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.LONG);
					System.out.printf("MEMUSE: %s: %dM/%dM\n", df.format(new Date()), memuse.getUsed()/1048576, memuse.getMax()/1048576);					
				}
			};
			timer.scheduleAtFixedRate(task, 0, 10000);
		}

		try {
			// The list of n-best file names comes from -input, or stdin when
			// absent. NOTE(review): reader is never closed and the timer is
			// never cancelled; both are reclaimed only on JVM exit.
			BufferedReader reader = new BufferedReader(new InputStreamReader(opts.input == null ? System.in : new FileInputStream(opts.input), 
					Charset.forName("UTF-8")));

			// Create the output directory if it does not exist yet.
			{
				File outDir = new File(opts.output);
				if (!outDir.exists()) {
					if (!outDir.mkdir()) {
						System.err.printf("failed to create output directory: %s\n", outDir.getPath());
						System.exit(1);
					}
				}
			}
		    JobManager manager = JobManager.getInstance();
		    JobGroup group = manager.createJobGroup("decoding");
		    
		    // Rescore each listed n-best file in turn.
		    for(String fname = reader.readLine(); fname != null; fname = reader.readLine()) {
		    	try {
		    		File file = new File(fname);
		    		InputStream in = IO.getInputStream(file);
		    		NbestFile nbest = NbestFile.readFile(in);
		    		int lineNo = 0;
		    		// One job per hypothesis; lineNo is 1-based.
		    		for(NbestFormatData data : nbest.getItems()) {
		    			Job job = new Job(new EvalRunnable(++lineNo, data), "decoding sentence #"+Integer.toString(lineNo));
		    			manager.addJob(group, job);
		    		}
		    		in.close();
		    		// Wait for every hypothesis of this file before writing
		    		// the rescored file out.
		    		group.join();
		    		File outFile = new File(opts.output, file.getName());
		    		OutputStream out = IO.getOutputStream(outFile);
		    		nbest.writeFile(out);
		    		out.close();
		    		System.out.printf("done rescoring %s\n", file.getName());
		    	// Per-file failures are logged and the remaining files are
		    	// still processed.
		    	} catch(Exception e) {
		    		System.out.printf("Error processing file %s\n", fname);
		    		e.printStackTrace(System.out);
		    	}
		    }
		} catch(IOException e) {
			e.printStackTrace();
		}
	}

}
