package fpTree;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import mining.CsvReader;
import mining.ItemSetMining;

import com.google.common.collect.Sets;

/**
 * Finds rare item sets by finding all item sets above the minimum support
 * for rare item sets, then removing those that are above the threshold for
 * frequent item sets.
 */
public class FpTree_RareSimple extends ItemSetMining {

	// whether to split the tree into a single prefix path during mining
	private static final boolean SPLIT_TREE = true;

	// root of the FP-Tree built by run(); exposed for inspection/testing
	public Node root;

	public FpTree_RareSimple(String filePath) {
		super(filePath);
	}

	/**
	 * Builds an FP-Tree from the transactions in <tt>filePath</tt>, mines every
	 * item set at or above the rare-itemset minimum support, then discards the
	 * ones whose support reaches the frequent-itemset threshold, leaving only
	 * the rare item sets.
	 *
	 * @return the rare item sets mapped to their support counts
	 */
	public Map<Set<Integer>, Integer> run() {
		Node root = new Node();
		this.root = root;

		Map<Integer, Node> headerTableLast = new HashMap<Integer, Node>();

		Map<Integer, Integer> itemFreqs = countItemSupport();
		List<Integer> fList = createFList(itemFreqs);
		System.out.println("fList " + fList.size() + ": " + fList);

		Map<Integer, Node> headerTable = new LinkedHashMap<Integer, Node>();
		initialiseLinkedHashMap(headerTable, fList);

		// the item -> rank map never changes between transactions, so build it
		// once instead of once per transaction
		Map<Integer, Integer> fListMap = makeMapFromFList(fList);
		for (CsvReader reader = new CsvReader(this.filePath); reader.hasNext();)
		{
			List<Integer> transaction = new ArrayList<Integer>(reader.read());
			processTransaction(transaction, 1, root, headerTable, headerTableLast, fListMap);
		}

		Map<Set<Integer>, Integer> result = mine(headerTable, root);
		removeItemsetsEqToOrAboveSupport(result, getMinFreqItemsetSupport());
		return result;
	}

	/**
	 * Removes from <tt>itemsets</tt> every entry whose support is greater than
	 * or equal to <tt>maxSupport</tt>, in place via the iterator (safe removal
	 * during iteration).
	 */
	private void removeItemsetsEqToOrAboveSupport(Map<Set<Integer>, Integer> itemsets, int maxSupport) {
		Entry<Set<Integer>, Integer> resultPair;
		for (Iterator<Entry<Set<Integer>, Integer>> i = itemsets.entrySet().iterator(); i.hasNext(); ) {
			resultPair = i.next();
			if (resultPair.getValue() >= maxSupport) {
				i.remove();
			}
		}
	}

	/**
	 * Builds the F-list: item names whose support meets the rare-itemset
	 * minimum, sorted by decreasing frequency (via {@link FListComparator}).
	 *
	 * @param itemFreqs item name mapped to its support count
	 * @return the qualifying item names in decreasing-frequency order
	 */
	public List<Integer> createFList(Map<Integer, Integer> itemFreqs) {
		List<Integer> fList = new ArrayList<Integer>();
		for (int itemName : itemFreqs.keySet()) {
			int support = itemFreqs.get(itemName);
			if (support >= getMinRareItemsetSupport()) {
				fList.add(itemName);
			}
		}
		Collections.sort(fList, new FListComparator(itemFreqs));

		return fList;
	}

	/**
	 * Sets up the iteration order of the LinkedHashMap to be the reverse of <tt>fList</tt>.
	 * All values are initialised to <tt>null</tt>; they are filled in lazily as
	 * the first node for each item is created.
	 * @param headerTable the (empty) header table to initialise
	 * @param fList the F-list whose reverse order becomes the iteration order
	 */
	public void initialiseLinkedHashMap(Map<Integer, Node> headerTable, List<Integer> fList) {
		for (int i = fList.size() - 1; i >= 0; i--) {
			headerTable.put(fList.get(i), null);
		}
	}

	/**
	 * Builds an FP-Tree rooted at <tt>root</tt> from weighted transactions.
	 *
	 * @param fList - the list of itemNames ordered by decreasing frequency order.  The <i>reverse</i> order of this list
	 * 			is the iteration order of the returned Header Table.  This list is not modified.
	 * @param root - the Node that will be the root of the new FP-Tree.  This object is modified within the method.
	 * @param transactions - a <tt>Map</tt> containing transactions and their frequencies that are to be added to the FP-Tree.
	 * @return a Header Table of the new FP-Tree created from the transactions
	 */
	public Map<Integer, Node> processTransactions(Map<List<Integer>, Integer> transactions, final List<Integer> fList, Node root) {
		Map<Integer, Node> headerTable = new LinkedHashMap<Integer, Node>();
		// ensuring the order of iteration will be the reverse order of fList
		initialiseLinkedHashMap(headerTable, fList);

		Map<Integer, Node> headerTableLast = new HashMap<Integer, Node>();
		// build the rank map once, not once per transaction
		Map<Integer, Integer> fListMap = makeMapFromFList(fList);
		for (List<Integer> transaction : transactions.keySet()) {
			processTransaction(transaction, transactions.get(transaction), root, headerTable, headerTableLast, fListMap);
		}

		return headerTable;
	}

	// maps each item name in fList to its index (its frequency rank),
	// giving constant-time rank lookups when sorting transactions
	private Map<Integer, Integer> makeMapFromFList(List<Integer> fList) {
		Map<Integer, Integer> fListMap = new HashMap<Integer, Integer>();
		for (int i = 0; i < fList.size(); i++) {
			fListMap.put(fList.get(i), i);
		}
		return fListMap;
	}

	/**
	 * Adds a transaction to an existing FP-Tree (that may just be a root node).
 	 * Items in the transaction that are below minimum support (i.e. not in the
 	 * F-list) are removed, and the remainder is sorted into F-list order before
 	 * being threaded into the tree.
	 * @param <tt>headerTableLast</tt> used by processTransaction only, to keep track of the last
	 * 			node in each of the series of Nodes in <tt>headerTable</tt> (i.e. Nodes that were
	 * 			first placed into <tt>headerTable</tt>).
	 */
	public void processTransaction(List<Integer> transaction, int frequency,
									Node root, Map<Integer, Node> headerTable,
									Map<Integer, Node> headerTableLast,
									final Map<Integer, Integer> fListMap) {

		removeItemsBelowSupport(transaction, fListMap.keySet());

		// sort the transaction according to order in fList (fListMap);
		// compareTo avoids the overflow risk of subtracting indices
		Collections.sort(transaction, new Comparator<Integer>() {
			@Override
			public int compare(Integer s1, Integer s2) {
				return fListMap.get(s1).compareTo(fListMap.get(s2));
			}
		});

		Node current = root;
		for (int item : transaction) {
			// look for the right child to increment, if one exists
			if (current.getChildren().containsKey(item)) {		// if child exists, increment count
				current = current.getChildren().get(item);
				current.setCount(current.getCount() + frequency);
			} else {											// if it does not, create a child
				Node nn = new Node();
				nn.itemName = item;
				nn.setCount(frequency);							// initialise count
				current.getChildren().put(item, nn);			// add new node to parent
				nn.parent = current;							// add parent to new node

				// record the first node for this item in the header table;
				// have to test with null and not containsKey because headerTable
				// has been pre-initialised with null values
				if (headerTable.get(item) == null) {
					headerTable.put(item, nn);
				}
				// maintain the node-link chain: put() returns the previous last
				// node for this item, which is then linked forward to nn
				if (!headerTableLast.containsKey(item)) {
					headerTableLast.put(item, nn);
				} else {
					headerTableLast.put(item, nn).setNodeLink(nn);
				}

				current = nn;
			}
		}
	}

	// remove items not in fList (i.e. below minSup); modifies transaction in place
	public void removeItemsBelowSupport(List<Integer> transaction, final Collection<Integer> fList) {
		for (Iterator<Integer> i = transaction.iterator(); i.hasNext();  ) {
			int s = i.next();
			if (!fList.contains(s)) {
				i.remove();
			}
		}
	}

	/**
	 * Mines all item sets (at or above the rare-itemset minimum support) from
	 * the FP-Tree rooted at <tt>root</tt>.
	 *
	 * @return each mined item set mapped to its support count
	 */
	public Map<Set<Integer>, Integer> mine(Map<Integer, Node> headerTable, Node root) {
		// since this is the first call to multiPath, alpha is empty
		Set<Integer> alpha = Collections.emptySet();
		Map<Set<Integer>, Integer> results = multiPath(headerTable, alpha, root, "");

		return results;
	}

	/**
	 * Recursive FP-Growth step for a (possibly multi-path) tree conditional on
	 * <tt>alpha</tt>.  When {@link #SPLIT_TREE} is set, any single prefix path
	 * hanging off the root is split away and mined combinatorially
	 * ({@link #mineSinglePrefixPath}), the branching remainder is mined
	 * recursively, and the two result sets are joined via their cross product.
	 *
	 * @param headerTable header table of the tree being mined
	 * @param alpha the conditional item set accumulated so far (not modified)
	 * @param root root of the tree being mined
	 * @param spacing debug-indentation string, grown by one level per recursion
	 * @return item sets mapped to their support counts
	 */
	public Map<Set<Integer>, Integer> multiPath(Map<Integer, Node> headerTable, final Set<Integer> alpha, Node root, String spacing) {
		spacing = "@" + spacing + " ";

		Map<Set<Integer>, Integer> freqPatternSetP = null;
		List<Node> prefixPathNodes = null;
		if (SPLIT_TREE) {
			prefixPathNodes = singlePath(root);
			freqPatternSetP = new LinkedHashMap<Set<Integer>, Integer>();
			if (!prefixPathNodes.isEmpty()) {
				// split tree up, change children/parent references
				Node lastPrefixPathNode = prefixPathNodes.get(prefixPathNodes.size() - 1);
				Node branchNode = new Node(); // the null node that becomes root of branching part of the tree
				branchNode.setChildren(lastPrefixPathNode.getChildren());
				for (Node child : lastPrefixPathNode.getChildren().values()) {
					child.parent = branchNode;
				}
				lastPrefixPathNode.setChildren(Collections.<Integer, Node>emptyMap());

				// mine the prefix path
				Map<Set<Integer>, Integer> mineSinglePrefixPath = mineSinglePrefixPath(prefixPathNodes, alpha, headerTable, spacing);
				freqPatternSetP.putAll(mineSinglePrefixPath);

			}
		}

		List<Integer> keySet = new ArrayList<Integer>(headerTable.keySet());	// all items in Q
		if (SPLIT_TREE) {
			// items on the prefix path belong to P, not Q
			for (Node prefixNode : prefixPathNodes) {
				keySet.remove((Object) prefixNode.itemName);
			}
		}

		Map<Set<Integer>, Integer> freqPatternSetQ = new LinkedHashMap<Set<Integer>, Integer>();
		for (int itemName : keySet) {			// for each item in Q... i.e. branching part
			final Set<Integer> freqPatternQ = new LinkedHashSet<Integer>(alpha);
			freqPatternQ.add(itemName);			// beta = ai UNION alpha (line 10)

			Node newRoot = new Node();
			Node firstNodeLink = headerTable.get(itemName);
			Map<Integer, Node> newHeaderTable = constructConditional(firstNodeLink, spacing, newRoot);

			Map<Set<Integer>, Integer> multiPath = multiPath(newHeaderTable, freqPatternQ, newRoot, spacing);
			freqPatternSetQ.putAll(multiPath);

			// assert freqPatternQ is never added twice into freqPatternSetQ
			assert(!freqPatternSetQ.containsKey(freqPatternQ));

			freqPatternSetQ.put(freqPatternQ, tallyNodeLinkCount(firstNodeLink));
		}

		Map<Set<Integer>, Integer> results = new LinkedHashMap<Set<Integer>, Integer>();
		results.putAll(freqPatternSetQ);

		if (SPLIT_TREE) {
			results.putAll(freqPatternSetP);
			Map<Set<Integer>, Integer> union = crossProduct(freqPatternSetP, freqPatternSetQ);
			results.putAll(union);
		}
		return results;
	}

	// add up the frequencies of the nodes linked together by NodeLink
	private int tallyNodeLinkCount(Node node) {
		int count = 0;
		for (Node temp = node; temp != null; count += temp.getCount(), temp = temp.getNodeLink());
		return count;
	}

	// for every pair (pKey, qKey), adds pKey UNION qKey with support
	// min(support(pKey), support(qKey)) — the join step after tree splitting
	private Map<Set<Integer>, Integer> crossProduct(Map<Set<Integer>, Integer> p, Map<Set<Integer>, Integer> q) {
		Map<Set<Integer>, Integer> result = new LinkedHashMap<Set<Integer>, Integer>();
		for (Set<Integer> pKey : p.keySet()) {
			for (Set<Integer> qKey : q.keySet()) {
				Set<Integer> union = new LinkedHashSet<Integer>(pKey);
				union.addAll(qKey);
				if (p.get(pKey) < q.get(qKey)) {
					result.put(union, p.get(pKey));
				} else {
					result.put(union, q.get(qKey));
				}
			}
		}
		return result;
	}

	/**
	 * Mines a single prefix path by enumerating every non-empty subset of its
	 * items (each unioned with <tt>alpha</tt>).  Since the prefix path nodes
	 * are ordered by decreasing frequency, the support of each subset is the
	 * count of the deepest prefix-path node present in it.
	 *
	 * @param prefixPathNodes the prefix path, first element nearest the root
	 * @param alpha the conditional item set (added to every emitted subset)
	 * @param headerTable header table used to look up per-item counts
	 * @param spacing debug-indentation string (unused here beyond recursion bookkeeping)
	 * @return each generated item set mapped to its support count
	 */
	public Map<Set<Integer>, Integer> mineSinglePrefixPath(List<Node> prefixPathNodes, Set<Integer> alpha, Map<Integer, Node> headerTable, String spacing) {
		Map<Set<Integer>, Integer> results = new LinkedHashMap<Set<Integer>, Integer>();

		Iterator<Set<Integer>> powerSetIterator = powerSetIterator(prefixPathNodes);

		while (powerSetIterator.hasNext()) {

			LinkedHashSet<Integer> set = new LinkedHashSet<Integer>(powerSetIterator.next());

			if (set.isEmpty())		// skip the empty set from power set
				continue;

			set.addAll(alpha);

			// count the frequency of each item in "set" by going through headerTable
			int min = Integer.MAX_VALUE;

			// since prefixPathNodes are ordered by decreasing frequency, the
			// last one must have the lowest support (which is the same as that
			// of the conditional nodes alpha).  So to find the support of the
			// itemset, just need to find the last node in the prefix path chain
			// that is also present in the itemset.
			Integer[] setArr = set.toArray(new Integer[set.size()]);
			for (int i = setArr.length - 1; i >= 0; i--) {
				int last = setArr[i];
				if (headerTable.containsKey(last)) {
					min = headerTable.get(last).getCount();
					break;
				}
			}

			results.put(set, min);
		}

		return results;
	}

	/**
	 * Constructs a conditional FP-Tree using a series of <tt>Node</tt> objects that begins
	 * with <tt>firstNode</tt>, and connected together by <tt>nodeLink</tt>.  The conditional
	 * tree does not include the item represented by the firstNode.  The returned Header
	 * Table is of the constructed conditional FP-Tree.
	 *
	 * @param root - the Node that will be used as the root of the new FP-Tree that is
	 * 				constructed
	 * @param firstNode - the first <tt>Node</tt> in a series of nodes
	 * @return the Header Table of the conditional FP-tree that is constructed
	 *
	 */
	public Map<Integer, Node> constructConditional(final Node firstNode, String spacing, Node root) {

		// make conditional pattern base, and count item frequencies
		Map<List<Integer>, Integer> conditionalPatterns = new HashMap<List<Integer>, Integer>();
		Map<Integer, Integer> freqs = new HashMap<Integer, Integer>();	// for storing 1-itemset frequencies

		Node linkNode;
		for (linkNode = firstNode; linkNode != null; linkNode = linkNode.getNodeLink()) {
			// build pattern by going up parents
			LinkedList<Integer> pattern = new LinkedList<Integer>();	// stores one conditional pattern

			Node parentNode = linkNode.parent;		// don't include the current node in the header table
			while (parentNode != null && parentNode.itemName != -1) {

				// counting 1-itemset frequencies by going through link nodes and their parents;
				// each parent contributes the link node's count, not its own
				if (!freqs.containsKey(parentNode.itemName)) {
					freqs.put(parentNode.itemName, linkNode.getCount());
				} else {
					freqs.put(parentNode.itemName, freqs.get(parentNode.itemName) + linkNode.getCount());
				}
				pattern.addFirst(parentNode.itemName);
				parentNode = parentNode.parent;
			}
			conditionalPatterns.put(pattern, linkNode.getCount());		// add pattern to pattern-base

		}

		// newFList contains only item names that have over the required support
		List<Integer> newFList = createFList(freqs);

		// make tree
		Map<Integer, Node> newHeaderTable = processTransactions(conditionalPatterns, newFList, root); // build the tree using transactions
		return newHeaderTable;
	}

	/**
	 * Eagerly materialises the power set of the item names of <tt>nodes</tt>
	 * (excluding the empty set).  Insertion order of the prefix path is
	 * preserved via LinkedHashSet.  Prefer {@link #powerSetIterator} for large
	 * inputs, which enumerates lazily.
	 */
	public Set<Set<Integer>> nodePowerSet(List<Node> nodes) {
		Set<Set<Integer>> patterns = new LinkedHashSet<Set<Integer>>();
		// must use linked hash set to preserve ordering of the prefix path
		LinkedHashSet<Node> nodesSet = new LinkedHashSet<Node>(nodes);
		for (Set<Node> set : Sets.powerSet(nodesSet)) {
			if (!set.isEmpty()) {		// don't add the empty set to results
				List<Integer> prefix = new ArrayList<Integer>();
				for (Node node : set) {
					prefix.add(node.itemName);
				}
				patterns.add(new LinkedHashSet<Integer>(prefix));
			}
		}
		return patterns;
	}

	/**
	 * Returns a lazy iterator over the power set of the item names of
	 * <tt>nodes</tt> (the empty set is included; callers skip it).
	 */
	public Iterator<Set<Integer>> powerSetIterator(List<Node> nodes) {
		Set<Integer> nodeItems = new LinkedHashSet<Integer>();
		for (Node node : nodes) {
			nodeItems.add(node.itemName);
		}
		PowerSet<Integer> iterator = new PowerSet<Integer>(nodeItems);
		return iterator;
	}


	// returns the single prefix path from this node
	// order of path is maintained, where the first element is the root's child, and so on
	// if there is none, returns an empty list
	// skips the "null" root node
	public List<Node> singlePath(Node root) {
		List<Node> path = new ArrayList<Node>();
		Node current = root;
		while (current.getChildren().size() == 1) {
			for (Node child : current.getChildren().values()) { // there's only 1 child
				current = child;
			}
			path.add(current);
		}
		return path;
	}
}
