package fpTree;

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Vector;

import stats.RpTreeStats;

import fpTree.FListComparator;
import fpTree.Node;

import mining.CsvReader;
import mining.ItemSetMining;
import mining.WriteStats;

/**
 * Only returns results that include a rare item (i.e. an item below minimum support). Does this by constructing an FP-Tree using only
 * transactions that include a rare item. In the resulting tree, which also includes frequent items, only those nodes that are rare are
 * mined.
 * 
 * Therefore, does NOT return all rare itemsets - only those itemsets that include a rare item.
 * 
 * Only itemsets within the specified support range are included in the results.
 */
public class RpTree extends ItemSetMining
{
	
	public Node freqRoot;
	public Node rareRoot;
	private BufferedWriter bw;
	// when true, every mined itemset is also written to disk (see writeToFile)
	private final static boolean write = false;
	
	private final List<Integer> aList = new ArrayList<Integer>(); // all items, decreasing frequency order
	private final List<Integer> fList = new ArrayList<Integer>(); // frequent items (support >= minFreqItemsetSupport)
	private final List<Integer> rList = new ArrayList<Integer>(); // rare items (minRareItemsetSupport <= support < minFreqItemsetSupport)
	
	final private RpTreeStats stats;
	
	/**
	 * @param filePath
	 *            - path of the CSV transaction database to mine
	 */
	public RpTree(String filePath)
	{
		super(filePath);
		
		this.stats = new RpTreeStats();
		this.stats.className = this.getClass().getSimpleName();
		this.stats.write = write;
		this.stats.location = filePath;
	}
	
	/**
	 * Runs the mining. Scans the database twice: once to count item frequencies, and once to build an FP-Tree from only those
	 * transactions that contain at least one rare item. The tree is then mined recursively (<tt>multiPath</tt>), and only itemsets
	 * whose support falls below the frequent-itemset threshold are reported.
	 */
	public void run()
	{
		if (write)
		{
			try
			{
				this.filename = this.filePath + "-" + this.getMinRareItemsetSupport() + "-" + this.getMinFreqItemsetSupport() + 
						"-" + this.getClass().getSimpleName();
				this.bw = new BufferedWriter(new FileWriter(this.filename));
			} catch (IOException e)
			{
				e.printStackTrace();
			}
		}
		
		this.stats.minRareItemsetSup = this.getMinRareItemsetSupport();
		this.stats.minFreqItemsetSup = this.getMinFreqItemsetSupport();
		
		// start timing
		long t1 = System.nanoTime();
		
		Node rareRoot = new Node();
		
		// first scan: support count of every item, indexed by item name
		Vector<Integer> itemFreqs = this.findDbInfo();
		
		// items below the minimum rare support are pruned from every transaction
		BitSet itemsBelowSupport = new BitSet(itemFreqs.size());
		Map<Integer, Integer> itemFreqsMap = new HashMap<Integer, Integer>();
		for (int item = 0; item < itemFreqs.size(); item++)
		{
			if (itemFreqs.get(item) < this.getMinRareItemsetSupport())
			{
				itemsBelowSupport.set(item);
			} else {
				itemFreqsMap.put(item, itemFreqs.get(item));
			}
		}
		
		createListsMinFreq(itemFreqsMap, this.aList, this.fList, this.rList);
		this.stats.iterations = this.rList.size();
		
		Map<Integer, Node> rHeaderTableLast = new HashMap<Integer, Node>();
		Map<Integer, Node> rHeaderTable = new LinkedHashMap<Integer, Node>();
		initialiseLinkedHashMap(rHeaderTable, this.rList);
		
		// item -> position in aList; used to sort each transaction into decreasing-frequency order
		final Map<Integer, Integer> aListMap = listToMap(this.aList);
		
		// second scan: build the rare tree
		for (CsvReader reader = new CsvReader(this.filePath); reader.hasNext();)
		{
			List<Integer> transaction = new ArrayList<Integer>(reader.readIgnore(itemsBelowSupport));
			if (transaction.isEmpty())
				continue;
			
			// sort items in this transaction by decreasing frequency
			Collections.sort(transaction, new Comparator<Integer>()
			{
				@Override
				public int compare(Integer s1, Integer s2)
				{
					return aListMap.get(s1) - aListMap.get(s2);
				}
			});
			
			// after sorting, the last item is the least frequent; the transaction
			// contains a rare item iff that last item is rare
			if (this.rList.contains(transaction.get(transaction.size() - 1)))
			{ // transaction has a rare item - add it to the rare tree
				processTransaction(transaction, 1, rareRoot, rHeaderTable, rHeaderTableLast);
			}
			// transactions without a rare item are ignored (a frequent tree could be
			// built here as well, but it is not needed for mining rare itemsets)
		}
		
		// remove non-rare items from rHeaderTable, so that non-rare items will not be mined
		// TODO: can actually change processTransaction so that non-rare items are not even added
		// to the header table, but are instead only added to the tree.
		rHeaderTable.keySet().retainAll(this.rList);
		
		multiPath(rHeaderTable, Collections.<Integer> emptySet(), rareRoot, "");
		
		if (write)
		{
			try
			{
				this.bw.close();
			} catch (IOException e)
			{
				e.printStackTrace();
			}
		}
		
		long t2 = System.nanoTime();
		this.stats.runtime = (t2 - t1) / 1000000; // nanoseconds -> milliseconds
		
		System.out.println(this.stats);
		WriteStats.write(this.stats);
	}
	
	/**
	 * Sorts the items of <tt>itemFreqs</tt> into decreasing-frequency order (into <tt>aList</tt>), then splits that order into
	 * frequent items (<tt>fList</tt>) and rare items (<tt>rList</tt>) at the first item whose support is below
	 * <tt>getMinFreqItemsetSupport()</tt>. All three output lists are appended to in place.
	 */
	private void createListsMinFreq(Map<Integer, Integer> itemFreqs, List<Integer> aList, List<Integer> fList, List<Integer> rList)
	{
		aList.addAll(itemFreqs.keySet());
		Comparator<Integer> comparator = new FListComparator(itemFreqs);
		Collections.sort(aList, comparator);
		
		// Index of the first non-frequent item. Defaults to aList.size() so that,
		// when every item is frequent, fList gets everything and rList is empty.
		// (Previously this defaulted to -1, making subList(0, -1) throw.)
		int splitIndex = aList.size();
		
		for (int i = 0; i < aList.size(); i++)
		{
			if (itemFreqs.get(aList.get(i)) < this.getMinFreqItemsetSupport())
			{
				splitIndex = i;
				break;
			}
		}
		
		fList.addAll(aList.subList(0, splitIndex));
		rList.addAll(aList.subList(splitIndex, aList.size()));
	}

	/**
	 * Returns the items of <tt>itemFreqs</tt> whose support is at least <tt>getMinRareItemsetSupport()</tt>, sorted in
	 * decreasing-frequency order.
	 */
	private List<Integer> createFList(Map<Integer, Integer> itemFreqs)
	{
		List<Integer> fList = new ArrayList<Integer>();
		
		for (int itemName : itemFreqs.keySet())
		{
			if (itemFreqs.get(itemName) >= getMinRareItemsetSupport())
				fList.add(itemName);
		}
		
		Collections.sort(fList, new FListComparator(itemFreqs));
		
		return fList;
	}
	
	/**
	 * Sets up the iteration order of the LinkedHashMap to be the reverse of <tt>fList</tt>.
	 * 
	 * @param headerTable
	 *            - map to initialise; every item is mapped to <tt>null</tt>
	 * @param fList
	 *            - list of item names; inserted in reverse so mining proceeds least-frequent first
	 */
	private void initialiseLinkedHashMap(Map<Integer, Node> headerTable, List<Integer> fList)
	{
		for (int i = fList.size() - 1; i >= 0; i--)
		{
			headerTable.put(fList.get(i), null);
		}
	}
	
	/**
	 * Builds an FP-Tree from a set of (transaction, frequency) pairs and returns its Header Table.
	 * 
	 * @param fList
	 *            - the list of itemNames ordered by decreasing frequency order. The <i>reverse</i> order of this list is the iteration
	 *            order of the returned Header Table. This list is not modified.
	 * @param root
	 *            - the Node that will be the root of the new FP-Tree. This object is modified within the method.
	 * @param transactions
	 *            - a <tt>Map</tt> containing transactions and their frequencies that are to be added to the FP-Tree. Neither the map
	 *            nor its keys are modified.
	 * @return a Header Table of the new FP-Tree created from the transactions
	 */
	private Map<Integer, Node> processTransactions(Map<List<Integer>, Integer> transactions, final List<Integer> fList,
													Node root)
	{
		Map<Integer, Node> headerTable = new LinkedHashMap<Integer, Node>();
		// ensuring the order of iteration will be the reverse order of fList
		initialiseLinkedHashMap(headerTable, fList);
		
		final Map<Integer, Integer> fListMap = listToMap(fList);
		
		Map<Integer, Node> headerTableLast = new HashMap<Integer, Node>();
		Comparator<Integer> comparator = new Comparator<Integer>()
		{
			@Override
			public int compare(Integer s1, Integer s2)
			{
				return fListMap.get(s1) - fListMap.get(s2);
			}
		};
		
		for (Map.Entry<List<Integer>, Integer> entry : transactions.entrySet())
		{
			int frequency = entry.getValue();
			
			// Work on a copy of the key. The previous code filtered and sorted the
			// key list in place, which mutates a List stored as a HashMap key -
			// its hashCode changes while in the map, corrupting the map.
			List<Integer> transaction = new ArrayList<Integer>(entry.getKey());
			
			this.retainItemsInList(transaction, fList);
			
			// transaction must be sorted before being passed to processTransaction
			Collections.sort(transaction, comparator);
			
			processTransaction(transaction, frequency, root, headerTable, headerTableLast);
		}
		
		return headerTable;
	}
	
	/**
	 * Index of the list becomes values, list values becomes keys. 
	 */
	private Map<Integer, Integer> listToMap(List<Integer> list)
	{
		Map<Integer, Integer> listMap = new HashMap<Integer, Integer>();
		for (int i = 0; i < list.size(); i++)
		{
			listMap.put(list.get(i), i);
		}
		return listMap;
	}
	
	/**
	 * Adds a transaction (already sorted into fList order) to an existing FP-Tree (that may just be a root node). Counts along an
	 * existing prefix are incremented by <tt>frequency</tt>; new nodes are created where the path diverges, and each new node is
	 * appended to its item's node-link chain.
	 * 
	 * @param <tt>headerTableLast</tt> used by processTransaction only, to keep track of the last node in each of the series of Nodes in
	 *        <tt>headerTable</tt> (i.e. Nodes that were first placed into <tt>headerTable</tt>).
	 */
	private void processTransaction(List<Integer> transaction, int frequency, Node root, Map<Integer, Node> headerTable,
									Map<Integer, Node> headerTableLast)
	{
		
		Node current = root;
		for (int item : transaction)
		{
			// look for the right child to increment, if one exists
			if (current.getChildren().containsKey(item))
			{ // if child exists, increment count
				current = current.getChildren().get(item);
				current.setCount(current.getCount() + frequency);
			} else
			{ // if it does not, create a child
				Node nn = new Node();
				nn.itemName = item;
				nn.setCount(frequency); // initialise count
				current.getChildren().put(item, nn); // add new node to parent
				nn.parent = current; // add parent to new node
				
				// Add node-link. Test with null and not containsKey, because
				// headerTable has been initialised with null values.
				if (headerTable.get(item) == null)
				{ // first node for this item
					headerTable.put(item, nn);
				}
				if (!headerTableLast.containsKey(item))
				{
					headerTableLast.put(item, nn);
				} else
				{ // put returns the previous tail; link it to the new tail
					headerTableLast.put(item, nn).setNodeLink(nn);
				}
				
				current = nn;
			}
		}
	}
	
	/**
	 * Elements within <tt>itemset</tt> that are not in <tt>includeList</tt> are removed.
	 * Directly modifies <tt>itemset</tt>.
	 */
	private void retainItemsInList(Collection<Integer> itemset, final Collection<Integer> includeList)
	{
		for (Iterator<Integer> i = itemset.iterator(); i.hasNext();)
		{
			int s = i.next();
			if (!includeList.contains(s))
			{
				i.remove();
			}
		}
	}
	
	// itemsets found while mining the current top-level item; reset per top-level iteration
	long iterationItemsetCount;
	
	/**
	 * Recursive FP-growth mining. For each item in <tt>headerTable</tt> (least-frequent first), builds the conditional FP-Tree, recurses
	 * into it, and records the itemset <tt>alpha UNION item</tt> whenever its support is below the frequent-itemset threshold (i.e. the
	 * itemset is rare).
	 * 
	 * @param alpha
	 *            - the itemset suffix accumulated so far; empty at the top level
	 * @param spacing
	 *            - indentation string used only for (disabled) debug output
	 */
	private void multiPath(Map<Integer, Node> headerTable, final Set<Integer> alpha, Node root, String spacing)
	{
		spacing = "@" + spacing + " ";
		
		for (int itemName : headerTable.keySet())
		{ // for each item in Q... i.e. branching part
		
			if (alpha.isEmpty())
			{ // top level of the recursion: start a fresh per-item tally
				this.iterationItemsetCount = 0;
			}
			
			final Set<Integer> freqPatternQ = new LinkedHashSet<Integer>(alpha);
			freqPatternQ.add(itemName); // beta = ai UNION alpha (line 10)
			
			Node newRoot = new Node();
			Node firstNodeLink = headerTable.get(itemName);
			Map<Integer, Node> newHeaderTable = constructConditional(firstNodeLink, spacing, newRoot);
			
			multiPath(newHeaderTable, freqPatternQ, newRoot, spacing);
			
			// support of freqPatternQ = sum of counts along the node-link chain
			int tally = tallyNodeLinkCount(firstNodeLink);
			if (tally < this.getMinFreqItemsetSupport())
			{ // below the frequent threshold, so the itemset is rare: record it
				this.iterationItemsetCount++;
				this.stats.numItemsets++;
				this.stats.numItemsetItems += freqPatternQ.size();
				writeToFile(freqPatternQ, tally);
			}
			
			if (alpha.isEmpty())
			{
				this.stats.numIterationItemsets.add(this.iterationItemsetCount);
			}
			
		}
	}
	
	// suffix number of the current output file; see writeToFile
	int fileNum;
	
	/**
	 * Writes an itemset and its support as one CSV line. No-op unless <tt>write</tt> is enabled. Output rolls over to a new
	 * numbered file every 2^22 itemsets to keep file sizes bounded.
	 */
	private void writeToFile(Set<Integer> itemset, int support)
	{
		if (!write)
			return;
		
		try
		{
			if (this.stats.numItemsets % (1 << 22) == 0)
			{
				this.fileNum++;
				this.bw.close();
				this.bw = new BufferedWriter(new FileWriter(this.filename + "." + this.fileNum));
			}
			this.bw.write(itemset.toString());
			this.bw.write(",");
			this.bw.write(Integer.toString(support));
			this.bw.newLine();
		} catch (IOException e)
		{
			e.printStackTrace();
		}
	}
	
	/**
	 * Adds up the frequencies of the nodes linked together by nodeLink, starting from <tt>node</tt>. Returns 0 for a null start node.
	 */
	private int tallyNodeLinkCount(Node node)
	{
		int count = 0;
		for (Node temp = node; temp != null; count += temp.getCount(), temp = temp.getNodeLink())
			;
		return count;
	}
	
	/**
	 * Constructs a conditional FP-Tree using a series of <tt>Node</tt> objects that begins with <tt>firstNode</tt>, and connected together
	 * by <tt>nodeLink</tt>. The conditional tree does not include the item represented by the firstNode. The returned Header Table is of
	 * the constructed conditional FP-Tree.
	 * 
	 * @param root
	 *            - the Node that will be used as the root of the new FP-Tree that is constructed
	 * @param firstNode
	 *            - the first <tt>Node</tt> in a series of nodes
	 * @param spacing
	 *            - debug indentation, unused here except for being passed along
	 * @return the Header Table of the conditional FP-tree that is constructed
	 */
	private Map<Integer, Node> constructConditional(Node firstNode, String spacing, Node root)
	{
		
		// make conditional pattern base, and count item frequencies
		Map<List<Integer>, Integer> conditionalPatterns = new HashMap<List<Integer>, Integer>();
		// for storing 1-itemset frequencies
		Map<Integer, Integer> freqs = new HashMap<Integer, Integer>();
		
		Node linkNode;
		for (linkNode = firstNode; linkNode != null; linkNode = linkNode.getNodeLink())
		{
			// build pattern by going up parents; stores 1 conditional pattern
			ArrayList<Integer> pattern = new ArrayList<Integer>();
			
			// don't include the current node in the header table
			Node parentNode = linkNode.parent;
			
			// itemName -1 marks the root node, which terminates the walk
			while (parentNode != null && parentNode.itemName != -1)
			{
				
				// counting 1-itemset frequencies by going through link
				// nodes and their parents; each path contributes linkNode's count
				if (!freqs.containsKey(parentNode.itemName))
				{
					freqs.put(parentNode.itemName, linkNode.getCount());
				} else
				{
					freqs.put(parentNode.itemName, freqs.get(parentNode.itemName) + linkNode.getCount());
				}
				pattern.add(parentNode.itemName);
				parentNode = parentNode.parent;
			}
			// add pattern to pattern-base, in root-to-leaf order
			Collections.reverse(pattern);
			conditionalPatterns.put(pattern, linkNode.getCount());
		}
		
		// newFList contains only item names that have over the required support
		// TODO: have to change this to use createLists
		List<Integer> newFList = createFList(freqs);
		
		// build the conditional tree from the pattern base
		Map<Integer, Node> newHeaderTable = processTransactions(conditionalPatterns, newFList, root);
		return newHeaderTable;
	}
}
