/**
 * 
 */
package edu.umd.clip.lm.tools;

import java.util.*;
import java.util.concurrent.atomic.AtomicLong;
import java.util.zip.GZIPInputStream;

import java.io.*;

import edu.umd.clip.lm.factors.*;
import edu.umd.clip.lm.factors.Dictionary;
import edu.umd.clip.lm.util.tree.*;
import edu.umd.clip.lm.model.*;
import edu.umd.clip.lm.util.*;
import edu.umd.clip.lm.util.algo.*;
import edu.umd.clip.jobs.*;

import edu.berkeley.nlp.ling.Tree;
import edu.berkeley.nlp.util.*;
/**
 * @author Denis Filimonov <den@cs.umd.edu>
 *
 */
public class HFTTrainerMDI {

	/** Command-line options for the trainer. */
	public static class Options {
        @Option(name = "-input", required = false, usage = "Training data file (Default: stdin)")
		public String input;
        @Option(name = "-config", required = true, usage = "XML config file")
		public String config;
        @Option(name = "-jobs", usage = "number of concurrent jobs (default: 1)")
        public int jobs = 1;
        @Option(name = "-subtrees", usage = "Subtrees for latent variables from the parser")
		public String subtrees;
        @Option(name = "-words", usage = "Use words for the context (default: tags)")
		public boolean words;
	}
	
	/**
	 * Trains a Hidden Factor Tree (HFT) by MDI clustering of factor-tuple
	 * bigram statistics and stores the result in the global {@link Experiment}.
	 *
	 * Pipeline: (1) enumerate the space of hidden factor tuples into an
	 * n-ary tree; (2) collect left/right bigram co-occurrence counts from
	 * the training data (in parallel); (3) MDI-cluster each level of the
	 * tree into a binary tree; (4) optionally splice in parser-derived
	 * split subtrees at the leaves; (5) save the HFT into the experiment.
	 *
	 * @param input    readers over the training data (all are parsed)
	 * @param words    if true, use overt (word) factors for context; otherwise hidden (tag) factors
	 * @param subtrees path to a gzipped serialized {@code Map<String, Tree<Integer>>}
	 *                 of parser split trees, or null to derive dependent factors directly
	 * @param jobs     number of concurrent parsing jobs
	 * @throws IOException            on read/write failure
	 * @throws ClassNotFoundException if the subtree file contains unknown classes
	 */
	@SuppressWarnings("unchecked")
	public static void trainHFT(BufferedReader[] input, boolean words, String subtrees, int jobs) throws IOException, ClassNotFoundException {
		Experiment experiment = Experiment.getInstance();

		final FactorTupleDescription tupleDesc = experiment.getTupleDescription();

		byte[] hiddenFactors = tupleDesc.getHiddenFactors();
		FactorDescription[] descs = tupleDesc.getDescriptions();
		
		AnyaryTree<FactorTuple> root = new AnyaryTree<FactorTuple>(null);
		// leaf payload -> leaf node, filled by the post-order search below
		final HashMap<FactorTuple, AnyaryTree<FactorTuple>> leaves = new HashMap<FactorTuple, AnyaryTree<FactorTuple>>(); 
		
		// Partition hidden factors into independent (no parent) and dependent ones.
		byte[] independentFactors = new byte[hiddenFactors.length];
		{
			int count = 0;
			for(byte i=0; i<hiddenFactors.length; ++i) {
				if (descs[hiddenFactors[i]].getParent() == null) {
					independentFactors[count++] = hiddenFactors[i];
				}
			}
			if (independentFactors.length != count) {
				independentFactors = Arrays.copyOf(independentFactors, count);
			}
			
			// TODO: rewrite it in a generic manner to support arbitrary depth of dependent factors
			iterateThroughSpace(root, tupleDesc, independentFactors, 0, new FactorTuple().getValues());

			if (count < hiddenFactors.length && subtrees == null) {
				// there are dependent factors; expand them one level below the
				// independent-factor children (only supports depth 1 — see TODO)
				byte[] dependentFactors = new byte[hiddenFactors.length - count];
				count = 0;
				for(byte i=0; i<hiddenFactors.length; ++i) {
					if (descs[hiddenFactors[i]].getParent() != null) {
						dependentFactors[count++] = hiddenFactors[i];
					}
				}
				HashSet<AnyaryTree<FactorTuple>> children = root.getChildren();
				if (children != null) {
					for(AnyaryTree<FactorTuple> child : children) {
						iterateThroughSpace(child, tupleDesc, dependentFactors, 0, child.getPayload().getValues().clone());
					}
				}
			}
			// collect all leaves of the enumerated space
			AnyaryTreeFinder<FactorTuple> finder = new AnyaryTreeFinder<FactorTuple>() {
				public boolean apply(AnyaryTree<FactorTuple> node) {
					if (node.isLeaf()) {
						leaves.put(node.getPayload(), node);
					}
					return false; // never stop early: visit every node
				}
			};
			root.searchPostorder(finder);
		}
		
		// Dump the pre-clustering tree for inspection. The writer must be closed
		// (the previous version only flushed it, leaking the file handle).
		try {
			FileWriter treeOutput = new FileWriter("tree-before.dot");
			try {
				GraphvizOutput graph = new GraphvizOutput("HFT") {
					protected <T> String getText(T payload) {
						FactorTuple t = (FactorTuple) payload;
						return t.toStringNoNull();
					}
				};
				treeOutput.write(graph.getOutput(root));
			} finally {
				treeOutput.close();
			}
		} catch(IOException e) {
			e.printStackTrace();
		}
		
		// now collect the statistics
		// TODO: LM order is assumed to be 3
		FLMInputParser parser = new FLMInputParser(tupleDesc);
		long[] startTuples = new long[1];
		startTuples[0] = tupleDesc.createStartTuple();
		//startTuples[1] = tupleDesc.createStartTuple();
		parser.setStartTuples(startTuples);
		long[] endTuples = new long[1];
		endTuples[0] = tupleDesc.createEndTuple();
		parser.setEndTuples(endTuples);
		
		// One Level per internal node of the enumerated tree: bigram counts of
		// context words to the left/right of each child tuple, plus the child vocab.
		class Level {
			public HashMap<FactorTuple, Long2DoubleMap> leftCounts;
			public HashMap<FactorTuple, Long2DoubleMap> rightCounts;
			public long tuple;       // the node's own tuple bits (0 for the root)
			public HashSet<FactorTuple> vocab; // payloads of the node's children
			public Level(long tuple) {
				this.tuple = tuple;
				leftCounts = new HashMap<FactorTuple, Long2DoubleMap>(100);
				rightCounts = new HashMap<FactorTuple, Long2DoubleMap>(100);
			}
		}
		
		final LinkedList<Level> levels = new LinkedList<Level>();
		
		{
			// pre-order so parents precede children in `levels`
			AnyaryTreeFinder<FactorTuple> finder = new AnyaryTreeFinder<FactorTuple>() {
				public boolean apply(AnyaryTree<FactorTuple> node) {
					if (!node.isLeaf()) {
						long bits = 0;
						FactorTuple tuple = node.getPayload();
						if (tuple != null) bits = tuple.getBits();
						Level level = new Level(bits);
						level.vocab = new HashSet<FactorTuple>(node.getChildren().size());
						for(AnyaryTree<FactorTuple> child : node.getChildren()) {
							level.vocab.add(child.getPayload());
						}
						levels.add(level);
					}
					return false;
				}
			};
			root.searchPreorder(finder);
		}
		// interning cache: one canonical FactorTuple instance per distinct key
		HashMap<FactorTuple, FactorTuple> wordVocab = new HashMap<FactorTuple,FactorTuple>(10000);
		
		final AtomicLong totalCounts = new AtomicLong();
		final long factorsMask = words ? tupleDesc.getOvertFactorsMask() : tupleDesc.getHiddenFactorsMask();
		
		// Accumulate, for every level and every child tuple, how often each
		// context word occurs immediately to its left/right in the data.
		ParallelInputParser.Callback parserCallback = new ParallelInputParser.Callback(){
			public void process(long[] sentence) {
				for(int i=0; i<sentence.length-1; ++i) {
					long tuple1 = sentence[i];
					long tuple2 = sentence[i+1];
					
					for(Level level : levels) {
						long hidden1 = tuple1 & FactorTuple.getHiddenMask();
						long hidden2 = tuple2 & FactorTuple.getHiddenMask();

						if (FactorTuple.matches(hidden2, level.tuple)) {
							long word1 = tuple1 & factorsMask;
							
							for(FactorTuple vocabTuple : level.vocab) {
								if (FactorTuple.matches(hidden2, vocabTuple)) {
									// guard the shared map; callbacks run on multiple jobs
									synchronized(level.leftCounts) {
										Long2DoubleMap leftMap = level.leftCounts.get(vocabTuple);
										if (leftMap == null) {
											// NOTE(review): sized by the outer map's current size —
											// presumably an initial-capacity hint; verify intent
											leftMap = new Long2DoubleMap(level.leftCounts.size());
											level.leftCounts.put(vocabTuple, leftMap);
										}
										
										leftMap.addAndGet(word1, 1.0);
									}
									break; // a tuple matches at most one vocab entry
								}
							}
						}
						
						if (FactorTuple.matches(hidden1, level.tuple)) {
							long word2 = tuple2 & factorsMask;
							
							for(FactorTuple vocabTuple : level.vocab) {
								if (FactorTuple.matches(hidden1, vocabTuple)) {
									synchronized(level.rightCounts) {
										Long2DoubleMap rightMap = level.rightCounts.get(vocabTuple);
										if (rightMap == null) {
											rightMap = new Long2DoubleMap(level.rightCounts.size());
											level.rightCounts.put(vocabTuple, rightMap);
										}
										
										rightMap.addAndGet(word2, 1.0);
									}
									break;
								}
							}
						}
					}
					totalCounts.incrementAndGet();
				}
			}
		};
		ParallelInputParser inputParser = new ParallelInputParser(parser, parserCallback, jobs, false);
		
		for(BufferedReader reader : input) {
			inputParser.parse(reader);
		}
		
		// normalize all counts to joint probabilities
		// NOTE(review): if the input was empty this is 1/0 == Infinity — confirm
		// that empty training data is rejected upstream
		double revTotalCount = 1.0 / totalCounts.longValue();
		
		for(Level level : levels) {
			for(Map.Entry<FactorTuple, Long2DoubleMap> entry : level.leftCounts.entrySet()) {
				// normalize
				for(Long2DoubleMap.Iterator i = entry.getValue().iterator(); i.hasNext(); ) {
					Long2DoubleMap.Entry e = i.next();
					e.setValue(e.getValue() * revTotalCount);
				}
			}
			
			for(Map.Entry<FactorTuple, Long2DoubleMap> entry : level.rightCounts.entrySet()) {
				// normalize
				for(Long2DoubleMap.Iterator i = entry.getValue().iterator(); i.hasNext(); ) {
					Long2DoubleMap.Entry e = i.next();
					e.setValue(e.getValue() * revTotalCount);
				}
			}
		}
		final MutableInteger splitCount = new MutableInteger(0);
		
		// cluster -> binary-tree node; shared with the MDI notifier callback
		final Map<Collection<FactorTuple>, AnyaryTree<FactorTuple>> treeNodes = Collections.synchronizedMap(new HashMap<Collection<FactorTuple>, AnyaryTree<FactorTuple>>());
		MDIClusterNotifier<FactorTuple> MDINotifier = new MDIClusterNotifier<FactorTuple>() {

			/* (non-Javadoc)
			 * @see edu.umd.clip.lm.util.algo.MDIClusterNotifier#notify(java.util.Collection, java.util.Collection, java.util.Collection)
			 */
			public boolean notify(Collection<FactorTuple> oldCluster,
					Collection<FactorTuple> cluster1, Collection<FactorTuple> cluster2) {
				// each binary split of a cluster becomes a pair of children in the tree;
				// singleton clusters get their element as the node label
				AnyaryTree<FactorTuple> node = treeNodes.get(oldCluster);
				FactorTuple label1 = null;
				if (cluster1.size() == 1) {
					label1 = cluster1.iterator().next();
				}
				AnyaryTree<FactorTuple> subnode1 = new AnyaryTree<FactorTuple>(label1);
				node.addChild(subnode1);
				
				FactorTuple label2 = null;
				if (cluster2.size() == 1) {
					label2 = cluster2.iterator().next();
				}
				AnyaryTree<FactorTuple> subnode2 = new AnyaryTree<FactorTuple>(label2);
				node.addChild(subnode2);
				
				treeNodes.put(cluster1, subnode1);
				treeNodes.put(cluster2, subnode2);
				return true;
			}
		};
		
		// Run MDI on every level; the notifier grows a binary tree per level,
		// and levels chain together because a level's root may already exist
		// as a singleton-cluster node created by the parent level's split.
		AnyaryTree<FactorTuple> theRoot = null;
		for(ListIterator<Level> it = levels.listIterator(0); it.hasNext();) {
			Level level = it.next();
			// defensive copy: MDI partitions the vocab in place
			HashSet<FactorTuple> vocab = new HashSet<FactorTuple>(level.vocab);
			AnyaryTree<FactorTuple> treeRoot;
			{
				// a singleton list is how the parent level registered this node
				LinkedList<FactorTuple> rootVocab = new LinkedList<FactorTuple>();
				rootVocab.add(new FactorTuple(level.tuple));
				treeRoot = treeNodes.get(rootVocab);
			}
			if (treeRoot == null) {
				treeRoot = new AnyaryTree<FactorTuple>(new FactorTuple(level.tuple));
			}
			treeNodes.put(vocab, treeRoot);
			if (theRoot == null) {
				theRoot = treeRoot;
			}
			
			MDI<FactorTuple,FactorTuple> algo = new MDI<FactorTuple,FactorTuple>(vocab, wordVocab.keySet());
			algo.setNotifier(MDINotifier);
			
			// feed the normalized left/right context distributions to MDI,
			// interning context keys through wordVocab
			for(Map.Entry<FactorTuple, Long2DoubleMap> entry : level.leftCounts.entrySet()) {
				FactorTuple word = entry.getKey();
				for(Long2DoubleMap.Iterator i = entry.getValue().iterator(); i.hasNext(); ) {
					Long2DoubleMap.Entry e = i.next();
					FactorTuple tmpTuple = getCachedTuple(wordVocab, new FactorTuple(e.getKey()));
					algo.setLeftProb(word, tmpTuple, e.getValue());
				}
			}
			
			for(Map.Entry<FactorTuple, Long2DoubleMap> entry : level.rightCounts.entrySet()) {
				FactorTuple word = entry.getKey();
				for(Long2DoubleMap.Iterator i = entry.getValue().iterator(); i.hasNext(); ) {
					Long2DoubleMap.Entry e = i.next();
					FactorTuple tmpTuple = getCachedTuple(wordVocab, new FactorTuple(e.getKey()));
					algo.setRightProb(word, tmpTuple, e.getValue());
				}
			}
			algo.normalizeDistributions();
			algo.partition(vocab);
			
			splitCount.add(1);
		}

		// encode the binary cluster tree as an HFT with binary-prefix payloads
		HFT<HFTPayload> hft = new HFT<HFTPayload>(makeHFT(new BinaryPrefix(new BitSet(0), 0), theRoot));
		
		if (subtrees != null) {
			// load parser-derived split trees and splice them in at the HFT leaves
	        FileInputStream fis = new FileInputStream(subtrees); // load from file
	        GZIPInputStream gzis = new GZIPInputStream(fis); // compressed
	        ObjectInputStream in = new ObjectInputStream(gzis); // load objects

	        Map<String, Tree<Integer>> splitTreeMap;
	        try {
	        	splitTreeMap = (Map<String, Tree<Integer>>) in.readObject();
	        } finally {
	        	in.close(); // closes the whole stream chain, even if readObject throws
	        }
	        
	        assert(independentFactors.length == 1);
	        byte tagFactor = independentFactors[0];
	        Dictionary tagDict = tupleDesc.getDictionary(tagFactor);
	        // find the (single) hidden factor whose parent is the tag factor
	        byte superTagIdx = -1;
			for(byte i=0; i<hiddenFactors.length; ++i) {
				if (tupleDesc.getParentIndex(hiddenFactors[i]) == independentFactors[0]) {
					superTagIdx = hiddenFactors[i];
					break;
				}
			}
			assert(superTagIdx != -1);

			// snapshot the leaves first: attaching subtrees mutates the tree
			ArrayList<BinaryTree<HFTPayload>> hftLeaves = new ArrayList<BinaryTree<HFTPayload>>(50);
			
			for(BinaryTreeIterator<HFTPayload> iterator = hft.getTree().getLeafIterator(); iterator.hasNext();) {
				hftLeaves.add(iterator.nextNode());
			}
			
			for(BinaryTree<HFTPayload> node : hftLeaves) {
				long tuple = node.getPayload().tuple;
				if (tuple == 0) continue; // unlabeled leaf: nothing to split
				
				int factorValue = FactorTuple.getValue(tuple, tagFactor);
				Tree<Integer> splitTree = splitTreeMap.get(tagDict.getWord(factorValue));
				if (splitTree == null) {
					// sentence boundary tags get trivial single-node trees
					if (Dictionary.isStart(factorValue)) {
						splitTree = new Tree<Integer>(Dictionary.getStart());
					} else if (Dictionary.isEnd(factorValue)) {
						splitTree = new Tree<Integer>(Dictionary.getEnd());
					} else {
						System.err.printf("Can't find split tree for %s, creating empty\n", FactorTuple.toStringNoNull(tuple));
						splitTree = new Tree<Integer>(Dictionary.FIRST_NORMAL_VALUE);
					}
				} else {
					// shift parser labels past the dictionary's reserved values
					Iterator<Tree<Integer>> iterator = splitTree.iterator();
					for(; iterator.hasNext();) {
						Tree<Integer> t = iterator.next();
						t.setLabel(t.getLabel() + Dictionary.FIRST_NORMAL_VALUE);
					}
				}
				// remove unary chains
				normalizeTree(splitTree);
				
				BinaryTree<HFTPayload> newNode = extendHFT(node.getPayload().prefix, splitTree, tuple, tagFactor, superTagIdx);
				// NOTE(review): assumes every labeled leaf has a parent (i.e. the
				// HFT is never a single node) — confirm
				BinaryTree<HFTPayload> parent = node.getParent();
				if (parent.getLeft() == node) {
					parent.attachLeft(newNode);
				} else {
					parent.attachRight(newNode);
				}
			}
		}

		dumpTree(hft.getTree(), "tree-after.dot");
		
		experiment.setHFT(hft);
	}

	/**
	 * Entry point: parses options, starts the job manager, trains the HFT
	 * from the input data, and saves the updated experiment config.
	 *
	 * @param args command-line arguments, see {@link Options}
	 * @throws IOException 
	 * @throws ClassNotFoundException 
	 */
	public static void main(String[] args) throws IOException, ClassNotFoundException {
        OptionParser optParser = new OptionParser(Options.class);
        Options opts = (Options) optParser.parse(args, true);

        JobManager.initialize(opts.jobs);
		Thread thread = new Thread(JobManager.getInstance(), "Job Manager");
		thread.setDaemon(true);
		thread.start();

		Experiment.initialize(opts.config);
		Experiment experiment = Experiment.getInstance();

		BufferedReader[] readers = new BufferedReader[]{IO.getReader(opts.input == null ? System.in : IO.getInputStream(opts.input))};

		trainHFT(readers, opts.words, opts.subtrees, opts.jobs);
		try {
			experiment.saveConfig(opts.config);
		} catch(IOException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Collapses unary chains in place: a node with exactly one child takes on
	 * that child's label and children, repeatedly, then recurses.
	 */
	private static void normalizeTree(Tree<Integer> tree) {
		if (!tree.isLeaf()) {
			List<Tree<Integer>> children = tree.getChildren();
			if (children.size() == 1) {
				Tree<Integer> child = children.get(0);
				tree.setChildren(child.getChildren());
				tree.setLabel(child.getLabel());
				normalizeTree(tree); // the promoted child may itself be unary
			} else {
				for(Tree<Integer> child : children) {
					normalizeTree(child);
				}
			}
		}
	}
	
	/**
	 * Recursively converts a (binary) parser split tree into an HFT subtree.
	 * At each leaf, the split-tree label is written into the super-tag factor
	 * of the tuple; internal nodes extend the binary prefix with 0 (left) or
	 * 1 (right).
	 *
	 * @param prefix      binary prefix of the node being replaced
	 * @param splitTree   parser split tree (must be binary at internal nodes)
	 * @param tuple       the leaf's factor tuple bits
	 * @param tagIdx      index of the tag factor (currently unused here)
	 * @param superTagIdx index of the latent super-tag factor to set
	 * @return the new HFT subtree
	 */
	private static BinaryTree<HFTPayload> extendHFT(BinaryPrefix prefix, Tree<Integer> splitTree, 
			long tuple, 
			byte tagIdx,
			byte superTagIdx) 
	{
		BinaryTree<HFTPayload> tree = new BinaryTree<HFTPayload>(new HFTPayload(prefix, tuple));

		if (splitTree.isLeaf()) {
			tree.getPayload().tuple = FactorTuple.setValue(tuple, superTagIdx, splitTree.getLabel());
		} else {
			List<Tree<Integer>> children = splitTree.getChildren();
			assert(children.size() == 2);
			Iterator<Tree<Integer>> iterator = children.iterator();
			Tree<Integer> left = iterator.next();
			Tree<Integer> right = iterator.next();
			
			tree.attachLeft(extendHFT(prefix.appendZero(), left, tuple, tagIdx, superTagIdx));
			tree.attachRight(extendHFT(prefix.appendOne(), right, tuple, tagIdx, superTagIdx));
		}
		return tree;
	}

	/**
	 * Interns {@code value} through {@code cache}: returns the canonical
	 * instance equal to {@code value}, inserting it if absent.
	 * (Generalized from FactorTuple-only; callers are unaffected.)
	 */
	private static <T> T getCachedTuple(Map<T, T> cache, T value) {
		T cached = cache.get(value);
		if (cached == null) {
			cache.put(value, value);
			return value;
		}
		return cached;
	}
	
	/** Writes the HFT as a Graphviz dot file; errors are logged, not thrown. */
	private static void dumpTree(BinaryTree<HFTPayload> root, String filename) {
		try {
			FileWriter treeOutput = new FileWriter(filename);
			GraphvizOutput graph = new GraphvizOutput("HFT") {
				@Override
				public <T> String getText(T payload) {
					long tuple = ((HFTPayload) payload).tuple;
					return tuple == 0 ? "" : FactorTuple.toStringNoNull(tuple);
				}
			};
			treeOutput.write(graph.getOutput(root));
			treeOutput.close();
		} catch(IOException e) {
			e.printStackTrace();
		}
	}
	
	/**
	 * Recursively enumerates the cross-product of dictionary values for
	 * {@code factors[pos..]} and adds one child per complete tuple under
	 * {@code tree}. A node left with a single child is collapsed into it.
	 *
	 * @param tree      node to attach the enumerated tuples under
	 * @param tupleDesc tuple description providing dictionaries and parent links
	 * @param factors   factor indices to enumerate, in order
	 * @param pos       current position within {@code factors}
	 * @param values    scratch array of factor values, mutated in place
	 */
	private static void iterateThroughSpace(AnyaryTree<FactorTuple> tree, 
			FactorTupleDescription tupleDesc, 
			byte[] factors, 
			int pos, 
			int[] values) 
	{
		boolean isRoot = tree.getPayload() == null;
		FactorDescription desc = tupleDesc.getDescription(factors[pos]);
		edu.umd.clip.lm.factors.Dictionary dict;
		byte parentIdx = tupleDesc.getParentIndex(factors[pos]);
		if (parentIdx == -1) 
			dict = desc.getDictionary();
		else 
			dict = desc.getDictionary(values[parentIdx]);
		
		DictionaryIterator iter = dict.iterator(isRoot);
		if (iter.hasNext()) {
			// we don't want to remove payload if there are no children
			//tree.setPayload(null);
		} else {
			// NOTE(review): an empty iterator is assumed to imply a dependent
			// factor (parentIdx != -1); values[-1] would throw otherwise — confirm
			if (Dictionary.isStart(values[parentIdx]) || Dictionary.isEnd(values[parentIdx])) {
				// all dependent <s> or </s> are the same 
				values[factors[pos]] = values[parentIdx];
				tree.addChild(new AnyaryTree<FactorTuple>(new FactorTuple(tupleDesc.createTuple(values))));
			}
		}
		while(iter.hasNext()) {
			int value = iter.next();
			if (Dictionary.isNull(value) || Dictionary.isUnk(value)) continue;
			
			values[factors[pos]] = value;
			if (pos == factors.length-1) {
				tree.addChild(new AnyaryTree<FactorTuple>(new FactorTuple(tupleDesc.createTuple(values))));
			} else {
				iterateThroughSpace(tree, tupleDesc, factors, pos+1, values);
			}
		}
		HashSet<AnyaryTree<FactorTuple>> children = tree.getChildren();
		if (children != null && children.size() == 1) {
			// replace the sole child with grand-children
			AnyaryTree<FactorTuple> child = children.iterator().next(); 
			tree.setChildren(child.getChildren());
			tree.setPayload(child.getPayload());
		}
	}
	
	/**
	 * Recursively converts the MDI cluster tree into a binary HFT tree,
	 * assigning each node its binary prefix (0 = left, 1 = right). Every
	 * internal node of the cluster tree must have exactly two children.
	 */
	private static BinaryTree<HFTPayload> makeHFT(BinaryPrefix prefix, AnyaryTree<FactorTuple> node) {
		FactorTuple tuple = node.getPayload();
		BinaryTree<HFTPayload> tree = new BinaryTree<HFTPayload>(new HFTPayload(prefix, tuple == null ? 0 : tuple.getBits()));

		HashSet<AnyaryTree<FactorTuple>> children = node.getChildren();
		if (children != null) {
			assert(children.size() == 2);
			Iterator<AnyaryTree<FactorTuple>> iterator = children.iterator();
			AnyaryTree<FactorTuple> left = iterator.next();
			AnyaryTree<FactorTuple> right = iterator.next();
			
			tree.attachLeft(makeHFT(prefix.appendZero(), left));
			tree.attachRight(makeHFT(prefix.appendOne(), right));
		}
		return tree;
	}
}
