package app.frequentpatterns.fpgrowth;

import java.io.IOException;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * This is an implementation of the COFITREE algorithm
 * 
 * 
 * @author Kumori, 2012
 */
public class AlgoCofiTree {

	/** All frequent itemsets found by the last run. */
	private Itemsets frequentItemsets = new Itemsets("FREQUENT ITEMSETS");
	private long startTimestamp; // start time of the last run (ms), for stats
	private long endTime; // end time of the last run (ms), for stats
	/** Minimum support as an absolute transaction count (minsupp * |DB|). */
	public double relativeMinsupp;
	public int numOfItems;
	private int contextSize = 0; // number of transactions in the database
	/** Scratch container refilled by generateAllCandidatePattern(). */
	private Itemsets tempItemsets = new Itemsets("TEMP ITEMSETS");
	// Number of candidate-generation calls performed (debug statistic).
	private static int counter = 0;
	public double minWeight = 0; // minimum item-weight threshold
	public double minW = 0; // min weight in the conditional database
	public double maxW = 0; // max item weight in the transaction database

	public AlgoCofiTree() {
	}

	/**
	 * Runs the COFI-tree mining algorithm on the given database.
	 * 
	 * @param context
	 *            the transaction database (released once the FP-tree is built)
	 * @param minsupp
	 *            minimum support as a fraction of the database size
	 * @param minWeight
	 *            minimum item-weight threshold
	 * @return the frequent itemsets found
	 */
	public Itemsets runAlgorithm(Database context, double minsupp,
			double minWeight) {
		startTimestamp = System.currentTimeMillis();
		// Convert the relative threshold into an absolute transaction count.
		this.relativeMinsupp = minsupp * context.size();
		this.minWeight = minWeight;
		// (1) PREPROCESSING: item frequencies/weights come from the database's
		// initial scan.
		final Map<Integer, Integer> mapSupport = context.mapSupport;
		final Map<Integer, Double> mapWeight = context.mapWeight;

		this.maxW = getMaxWeight(context.mapWeight);
		// (2) PREPROCESSING: discard items that cannot appear in any frequent
		// weighted pattern: (support < minsup AND weight < minWeight) OR the
		// weighted upper bound support * maxW is below minsup.
		for (Itemset itemset : context.getObjects()) { // for each transaction
			// iterate backwards so removal by index stays valid
			for (int i = itemset.size() - 1; i >= 0; i--) { // for each item
				if ((mapSupport.get(itemset.get(i)) < relativeMinsupp)
						&& (mapWeight.get(itemset.get(i)) < this.minWeight)
						|| (mapSupport.get(itemset.get(i)) * maxW < relativeMinsupp)) {
					itemset.getItems().remove(i);
				}
			}
		}

		// (3) PREPROCESSING: sort the items of each transaction in ascending
		// order of their weight in the database.
		for (Itemset itemset : context.getObjects()) {
			Collections.sort(itemset.getItems(), new Comparator<Integer>() {
				@Override
				public int compare(Integer item1, Integer item2) {
					return mapWeight.get(item1).compareTo(mapWeight.get(item2));
				}
			});
		}

		// Optionally print the preprocessed database and the item weights.
		if (context.printAble) {
			context.printContext();
			context.printContextWeight();
		}

		// (4) Build the initial FP-tree, adding the transactions one by one.
		FPTree tree = new FPTree();
		for (Itemset transaction : context.getObjects()) {
			tree.addTransaction(transaction);
		}
		// Create the header table for the tree.
		tree.createHeaderList(mapWeight);

		setContextSize(context.size());
		context = null; // allow the database to be reclaimed

		// (5) Mine the FP-tree recursively, starting from an empty prefix
		// alpha whose support is the whole database.
		Itemset prefixAlpha = new Itemset();
		prefixAlpha.setTransactioncount(getContextSize());
		cofiAlgo(tree, prefixAlpha, mapSupport, mapWeight);
		endTime = System.currentTimeMillis();

		return getFrequentItemsets(); // Return all frequent itemsets found!
	}

	/**
	 * Mines patterns from the FP-tree: for every promising item, records the
	 * pattern alpha+item, builds the item's conditional COFI-tree from its
	 * prefix paths and mines that tree.
	 * 
	 * @param tree
	 *            the FP-tree
	 * @param prefixAlpha
	 *            the current prefix "alpha"
	 * @param mapSupport
	 *            the frequency of each item in the tree
	 * @param mapWeight
	 *            the weight of each item
	 */
	private void cofiAlgo(FPTree tree, Itemset prefixAlpha,
			Map<Integer, Integer> mapSupport, Map<Integer, Double> mapWeight) {

		// Walk the header list from the last entry to the first.
		for (int i = tree.headerList.size() - 1; i >= 0; i--) {
			Integer item = tree.headerList.get(i);

			int support = mapSupport.get(item);
			double weight = mapWeight.get(item);
			// Skip items that cannot yield a frequent weighted pattern
			// (same pruning condition as in runAlgorithm()).
			if ((support < relativeMinsupp) && (weight < minWeight)
					|| (support * maxW < relativeMinsupp)) {
				continue;
			}

			// Create Beta by concatenating Alpha with the current item and add
			// it to the frequent patterns; its support is the minimum of the
			// prefix support and the item support.
			Itemset beta = prefixAlpha.cloneItemset();
			beta.addItem(item);
			if (prefixAlpha.getAbsoluteSupport() < support) {
				beta.setTransactioncount(prefixAlpha.getAbsoluteSupport());
			} else {
				beta.setTransactioncount(support);
			}
			getFrequentItemsets().addItemset(beta, beta.size());

			// === Construct beta's conditional pattern base ===
			// It is a subdatabase consisting of the prefix paths in the
			// FP-tree co-occurring with the suffix pattern.
			List<List<FPNode>> prefixPaths = new ArrayList<List<FPNode>>();
			FPNode path = tree.mapItemNodes.get(item);
			while (path != null) {
				// skip paths that consist only of the root node
				if (path.parent.itemID != -1) {
					List<FPNode> prefixPath = new ArrayList<FPNode>();
					// NOTE: the node itself is added only to carry the path
					// support; conceptually it is not part of the prefix path.
					prefixPath.add(path);

					// Recursively add all ancestors up to (excluding) the root.
					FPNode parent = path.parent;
					while (parent.itemID != -1) {
						prefixPath.add(parent);
						parent = parent.parent;
					}
					prefixPaths.add(prefixPath);
				}
				// follow the node-link to the next occurrence of the item
				path = path.nodeLink;
			}

			// (A) Calculate the frequency of each item in the prefix paths.
			Map<Integer, Integer> mapSupportBeta = new HashMap<Integer, Integer>();
			for (List<FPNode> prefixPath : prefixPaths) {
				// the support of a prefix path is the support of its first node
				int pathCount = prefixPath.get(0).counter;
				// count every node except the first one
				for (int j = 1; j < prefixPath.size(); j++) {
					FPNode node = prefixPath.get(j);
					if (mapSupportBeta.get(node.itemID) == null) {
						mapSupportBeta.put(node.itemID, pathCount);
					} else {
						mapSupportBeta.put(node.itemID,
								mapSupportBeta.get(node.itemID) + pathCount);
					}
				}
			}

			// (B) Construct the conditional COFI-tree rooted at this item.
			CofiTree treeBeta = new CofiTree(item);
			for (List<FPNode> prefixPath : prefixPaths) {
				treeBeta.addPrefixPath(prefixPath, mapSupportBeta, mapWeight,
						relativeMinsupp, minWeight);
			}
			treeBeta.createHeaderList(mapWeight);

			// Mine the COFI-tree if it is not empty.
			if (treeBeta.root.childs.size() > 0) {
				miningCofi(treeBeta, mapSupportBeta, mapWeight);
				// Drop the reference; the original also forced System.gc()
				// here, which was removed — explicit GC requests only hurt
				// throughput and are merely advisory anyway.
				treeBeta = null;
			}
		}
	}

	/**
	 * Debug helper: prints one walk of a COFI-tree, descending into the last
	 * child at each level (not a full tree dump). Currently unused.
	 */
	private void printCofi(CofiTree treeBeta) {
		CofiNode path = treeBeta.root;
		int level = 1;
		CofiNode child = null;
		while (path.childs.size() > 0) {
			System.out.println(path.itemID);
			System.out.print("(" + level + ")");
			for (int i = 0; i < path.childs.size(); i++) {
				child = path.childs.get(i);
				System.out.println(child.itemID);
				// BUGFIX: (i + 1) must be parenthesized; without it '+' is
				// string concatenation, printing e.g. "(1.01)" not "(1.1)".
				System.out.print("(" + level + "." + (i + 1));
				System.out.print(")");
			}
			level++;
			path = child; // descend into the last child printed
		}
	}

	/**
	 * Mines a single COFI-tree: for every node of every header-table item
	 * (last to first), generates all candidate patterns of the branch from
	 * that node up to the root, using the branch support F = frequency -
	 * participation.
	 * 
	 * NOTE(review): this method is unfinished — candidates are generated into
	 * tempItemsets, but the accumulation into candidatePatterns and the final
	 * pruning step (removeUnsupportItems) are still disabled; verify the
	 * intended pipeline before relying on its output.
	 */
	private void miningCofi(CofiTree tree,
			Map<Integer, Integer> mapSupportBeta, Map<Integer, Double> mapWeight) {
		int rootItem = tree.root.itemID;
		// Candidate patterns of this COFI-tree (never filled in yet — see the
		// note above).
		Itemsets candidatePatterns = new Itemsets("CANDIDATE ITEMSET");
		// Select the next item: from the last header-table entry to the first.
		for (int i = tree.headerList.size() - 1; i >= 0; i--) {
			Integer item = tree.headerList.get(i);
			CofiNode path = tree.mapItemNodes.get(item);
			// Follow the item's node-link chain.
			while (path != null) {
				Itemset prefixItemset = new Itemset();
				Integer pCount = path.participation;
				Integer support = path.counter;
				// branch support F = node frequency - node participation
				Integer branchSupport = support - pCount;
				tempItemsets = new Itemsets("TEMP ITEMSET");
				// Generate all candidate patterns from the node-to-root path.
				generateAllCandidatePattern(rootItem, path, prefixItemset,
						branchSupport);
				// Intended continuation (currently disabled): discard the
				// generated itemsets that do not contain the root item; add
				// each remaining pattern to candidatePatterns with frequency F,
				// or increase an existing pattern's frequency by F.
				path = path.nodeLink; // next occurrence of the item
			}
		}
		// TODO: prune with removeUnsupportItems(...) once the accumulation
		// above is enabled.
		System.out.println("So lan thuc hien generate tap ung vien:" + counter);
	}

	/**
	 * Keeps only the candidate itemsets whose weighted support
	 * (support * maximal item weight) exceeds the minimum support, adding them
	 * to the frequent itemsets. Currently unused; kept for the unfinished
	 * miningCofi() pipeline.
	 */
	private void removeUnsupportItems(Itemsets candidatePatterns,
			Map<Integer, Double> mapWeight) {
		double support = 0;
		double maxW = 0;
		List<List<Itemset>> tempItemsets = candidatePatterns.getLevels();

		for (List<Itemset> level : tempItemsets) {
			for (Itemset itemset : level) {
				support = itemset.getAbsoluteSupport();
				// find the maximal weight among the itemset's items
				maxW = 0;
				for (Integer item : itemset.getItems()) {
					if (maxW < mapWeight.get(item)) {
						maxW = mapWeight.get(item);
					}
				}
				if (support * maxW > relativeMinsupp) {
					getFrequentItemsets().addItemset(itemset, itemset.size());
				}
			}
		}
	}

	/**
	 * @return the maximum weight in the given item-to-weight map, or 0 if the
	 *         map is empty.
	 */
	private double getMaxWeight(Map<Integer, Double> mapWeight) {
		double maxW = 0;
		for (Double weight : mapWeight.values()) {
			if (maxW < weight) {
				maxW = weight;
			}
		}
		return maxW;
	}

	/**
	 * Recursively adds all combinations of the nodes on a node-link path,
	 * concatenated with a given prefix, to the set of patterns found. Each
	 * combination's support is the counter of its last node.
	 * 
	 * @param node
	 *            the first node of the path
	 * @param prefix
	 *            the prefix
	 */
	private void addAllCombinationsForPathAndPrefix(FPNode node, Itemset prefix) {
		// Add the node to the prefix and record the resulting pattern.
		Itemset itemset = prefix.cloneItemset();
		itemset.addItem(node.itemID);

		itemset.setTransactioncount(node.counter);
		getFrequentItemsets().addItemset(itemset, itemset.size());

		// Recurse along the node-link: once without and once with this node.
		if (node.nodeLink != null) {
			addAllCombinationsForPathAndPrefix(node.nodeLink, prefix);
			addAllCombinationsForPathAndPrefix(node.nodeLink, itemset);
		}
	}

	/**
	 * Recursively generates every combination of the nodes on the path from
	 * {@code node} up to the root, concatenated with {@code prefix}, adding
	 * each combination to tempItemsets with the given branch support. Also
	 * overwrites each visited node's participation count with the branch
	 * support.
	 * 
	 * @param rootNode
	 *            the item at the root of the COFI-tree
	 * @param node
	 *            the current node of the path
	 * @param prefix
	 *            the prefix built so far
	 * @param branchSupport
	 *            the support F of the branch being processed
	 */
	private void generateAllCandidatePattern(int rootNode, CofiNode node,
			Itemset prefix, Integer branchSupport) {
		counter++;
		Itemset itemset = prefix.cloneItemset();
		node.participation = branchSupport;
		itemset.addItem(node.itemID);
		itemset.setTransactioncount(branchSupport);
		getTempItemsets().addItemset(itemset, itemset.size());
		// NOTE: the System.gc() call that used to sit here was removed —
		// forcing a GC inside an exponential recursion made the algorithm
		// unusably slow without freeing anything the JVM would not reclaim.
		if (node.parent != null) {
			// combinations without and with the current node
			generateAllCandidatePattern(rootNode, node.parent, prefix,
					branchSupport);
			generateAllCandidatePattern(rootNode, node.parent, itemset,
					branchSupport);
		}
	}

	/**
	 * Apriori-style candidate generation: combines pairs of frequent
	 * (k-1)-itemsets that share their first k-2 items into size-k candidates.
	 * 
	 * NOTE(review): unfinished — the combination step computes the missing
	 * item but never builds a candidate, so this always returns an empty list.
	 * 
	 * @param levelK_1
	 *            the frequent itemsets of size k-1, in lexical order
	 * @return the candidates of size k (currently always empty)
	 * @throws IOException
	 *             declared for overriders; never thrown here
	 */
	protected List<Itemset> generateCandidateSizeK(List<Itemset> levelK_1)
			throws IOException {
		List<Itemset> candidates = new ArrayList<Itemset>();

		// For each pair of itemsets I1 and I2 of level k-1:
loop1:	for (int i = 0; i < levelK_1.size(); i++) {
			Itemset itemset1 = levelK_1.get(i);
loop2:		for (int j = i + 1; j < levelK_1.size(); j++) {
				Itemset itemset2 = levelK_1.get(j);

				// Compare the items of itemset1 and itemset2: to combine them
				// they must share the first k-2 items, and the last item of
				// itemset1 must be smaller than the last item of itemset2.
				for (int k = 0; k < itemset1.size(); k++) {
					if (k == itemset1.size() - 1) {
						// last items: itemset1's must be strictly smaller
						// (lexical order) and different from itemset2's
						if (itemset1.getItems().get(k) >= itemset2.get(k)) {
							continue loop1;
						}
					} else if (itemset1.getItems().get(k) < itemset2.get(k)) {
						continue loop2; // keep searching
					} else if (itemset1.getItems().get(k) > itemset2.get(k)) {
						continue loop1; // stop searching: lexical order
					}
				}

				// Combine itemset1 and itemset2 into a candidate.
				// TODO: 'missing' is computed but the candidate itemset is
				// never built nor added to 'candidates'.
				Integer missing = itemset2.get(itemset2.size() - 1);
			}
		}
		return candidates;
	}

	/** Prints statistics about the last run with the FP-GROWTH header. */
	public void printStats() {
		printStats("=============  FP-GROWTH - STATS =============");
	}

	/** Prints statistics about the last run with the COFITREE header. */
	public void printStats(boolean cofi) {
		printStats("=============  COFITREE - STATS =============");
	}

	// Shared body of the two public printStats variants.
	private void printStats(String header) {
		System.out.println(header);
		long temps = endTime - startTimestamp;
		System.out.println(" Transactions count from database : "
				+ getContextSize());
		System.out.println(" Frequent itemsets count : "
				+ getFrequentItemsets().getItemsetsCount());
		System.out.println(" Total time ~ " + temps + " ms");
		System.out
				.println("===================================================");
	}

	public long getStartTimestamp() {
		return startTimestamp;
	}

	public void setStartTimestamp(long startTimestamp) {
		this.startTimestamp = startTimestamp;
	}

	public long getEndTime() {
		return endTime;
	}

	public void setEndTime(long endTime) {
		this.endTime = endTime;
	}

	/** Alias for {@link #getFrequentItemsets()}. */
	public Itemsets getItemsets() {
		return getFrequentItemsets();
	}

	public int getContextSize() {
		return contextSize;
	}

	public void setContextSize(int contextSize) {
		this.contextSize = contextSize;
	}

	public Itemsets getFrequentItemsets() {
		return frequentItemsets;
	}

	public void setFrequentItemsets(Itemsets frequentItemsets) {
		this.frequentItemsets = frequentItemsets;
	}

	public Itemsets getTempItemsets() {
		return tempItemsets;
	}

	public void setTempItemsets(Itemsets tempItemsets) {
		this.tempItemsets = tempItemsets;
	}

}
