/**
 * ALGORITHM: 
 * 
 * base case: pick the k-1 concepts that have the largest conditional 
 *            probability (i.e. child-card/parent-card.)
 *            
 * recursive case: Suppose we are looking at c2 which has 3 children c3,c4,c5
 * each with its corresponding k candidates. We consider merging of each candidate
 * to its parent, so we use the card of parent as probability denominator.
 * For e.g. if a merge of c8,c3 is considered, then the probability is
 * (c3.dCard + c8.dCard)/c3.card. This is as if the child c3 is asked to
 * reduce its candidates to k-1, k-2, k-3, etc. The same for each child.
 * 
 * If, once each child is reduced to 1, still more than k candidates remain, i.e. 
 * the degree of c2 is > k, then pick the top-k and merge the rest to c2. 
 * 
 */

package org.rescue.analysis.summarize;
import java.util.Enumeration;
import java.util.Vector;

import javax.swing.tree.DefaultMutableTreeNode;

import org.rescue.Dag.*;

/**
 * Greedily reduces a concept DAG's candidate tree to at most {@code k}
 * representative concepts by repeatedly merging the child/parent candidate
 * pair with the smallest information loss (see the algorithm description at
 * the top of this file).
 */
public class GreedySummarizor {
	// Total annotation cardinality of the taxonomy; denominator for all
	// probabilities computed during merging.
	int totalCard;

	/**
	 * Walks {@code t} in postorder, building a candidate tree per concept and
	 * merging candidates bottom-up until each node holds at most {@code k}.
	 *
	 * @param t           the concept DAG to summarize
	 * @param rootConcept root of the region being summarized
	 * @param totalCard   total annotation cardinality (probability denominator)
	 * @param k           maximum number of summary candidates to retain
	 * @return the root concept's candidate tree, reduced to at most k candidates
	 */
	public DefaultMutableTreeNode summarize (DirectedAcyclicGraph t, Concept rootConcept, int totalCard, int k) {
		this.totalCard = totalCard;
		Enumeration<DAGNode> nodes = t.postOrder(rootConcept);
		while (nodes.hasMoreElements()) {
			DAGNode cnode = nodes.nextElement();
			//	get one created during tree formation
			Concept con = cnode.getNodeConcept();

			/* Add self in candidates tree  */
			DefaultMutableTreeNode cand = new DefaultMutableTreeNode();
			SCandidateData candData = new SCandidateData();
			candData.setId(con.id);
			candData.setDCard (con.dCard); // initialize (will contain card of merged annotations)
			candData.setCard (con.card); // should strictly be: con.card - (cards of all candidate descendants)
			candData.setSubCandsCount(1);
			cand.setUserObject(candData);
			SAuxiliaryData sumOther = new SAuxiliaryData();
			sumOther.setCandTree(cand);
			con.setAuxData(sumOther);

			// use cardinality set by TaxonomyReader to recursively compute parent cardinality
			con.card += con.dCard; // initialized card
			if (!cnode.isRoot()) { 
				Vector<Concept> pars = t.getParents(con);
				for (int i = 0; i < pars.size(); i++) {
					pars.elementAt(i).card += con.card;
				}
			}

			if (con.card > 0) { // if not a null concept which should be ignored
				if (!cnode.isLeaf())
				{
					/* Growing phase */
					/* Populate node's cand tree with all the candidates from all of its
					 * children; the candidate node being added may in turn have its own
					 * children, which remain linked to it. */

					for (int i = 0; i < cnode.getChildCount(); i++) 
					{
						Concept childCon = cnode.getChildAt(i).getNodeConcept();

						// zero-cardinality concepts are kept in the DAG but skipped here
						if (childCon.card > 0) {
							DefaultMutableTreeNode childCand = ((SAuxiliaryData) childCon.getAuxData()).getCandTree(); 
							// make candtrees of all childConcepts children of candNode
							cand.add(childCand);
							SCandidateData childCandData = (SCandidateData)childCand.getUserObject();
							candData.setSubCandsCount(candData.getSubCandsCount() + 
									childCandData.getSubCandsCount());
						}
					}

					/* Merging Phase */
					/* During merging, do a postorder traversal of the candidate tree of each
					 * child and compute the information loss of a merge.
					 * NOTE: the child cand concept is merged with the current parent cand
					 * concept if its candidate subtree is empty and the child node is the
					 * only one left. */

					if (candData.getSubCandsCount() <= k ) 
						continue;  // no need to merge, so continue postorder
					else 
					{
						while (candData.getSubCandsCount() > k) 
						{
							// pick the minimum-loss merge over all child candidate subtrees;
							// zero-loss merges win, tie-broken by smallest merged probability
							SMinLossData minLossData, newMinLossData;
							minLossData = new SMinLossData();
							for (int j = 0; j < cand.getChildCount(); j++) 
							{ 
								newMinLossData = findMinLossMerge ((DefaultMutableTreeNode)cand.getChildAt(j));
								if (newMinLossData.getMinloss() == 0.0 
										&& newMinLossData.getMinProbability() < minLossData.getMinProbability()) {
									minLossData = newMinLossData;
								}								
								else if (newMinLossData.getMinloss() < minLossData.getMinloss() ) 
								{
									minLossData = newMinLossData;
								}
							}
							// NOTE(review): if no mergeable pair was found, getMergeParent()
							// is null and this line throws NPE — confirm a candidate tree
							// with subCandsCount > k always yields a mergeable pair.
							System.out.println("merging " +
									((SCandidateData)minLossData.getMergeParent().getUserObject()).getId()
									+ " -> " +
									((SCandidateData)minLossData.getMergeChild().getUserObject()).getId() 
									+ "  " + minLossData.getMinloss());

							merge (minLossData); // merge and recompute cardinality of candidate tree

							// merge() already decrements the count on the merge parent's own
							// data; only discount here when the parent is not this node
							if (((SCandidateData) minLossData.getMergeParent().getUserObject()).getId() !=
								candData.getId())
								candData.setSubCandsCount(candData.getSubCandsCount() - 1);
						}
					}
				}
			}
		}
		return ((SAuxiliaryData)rootConcept.getAuxData()).getCandTree();
	}

	/**
	 * Scans the candidate subtree rooted at {@code childCand} in postorder and
	 * finds the child/parent candidate pair whose merge loses the least
	 * information. A zero-loss pair, when one exists, always wins; among
	 * zero-loss pairs the one producing the least probable merged parent wins.
	 *
	 * @param childCand root of the candidate subtree to scan
	 * @return the selected merge; its node fields stay null (and minloss stays
	 *         +infinity) when no pair under {@code childCand} is mergeable
	 */
	public SMinLossData findMinLossMerge(DefaultMutableTreeNode childCand) {
		double loss;
		DefaultMutableTreeNode mergeParent;
		DefaultMutableTreeNode mergeChild;
		SMinLossData minLossData = new SMinLossData();
		SMinLossData zeroMinLossData = new SMinLossData(); // best zero-loss pair, tracked separately
		Enumeration<?> e = childCand.postorderEnumeration();
		
		while (e.hasMoreElements()) {
			mergeChild = (DefaultMutableTreeNode)e.nextElement();
			// a detached node has no parent to merge into; note that childCand
			// itself is still attached, so merging it into the current concept's
			// own cand node is considered here too
			if (mergeChild.getParent() != null) {
				SCandidateData mergeChildData = (SCandidateData)mergeChild.getUserObject();
				mergeParent = (DefaultMutableTreeNode) mergeChild.getParent(); 
				SCandidateData mergeParentData = (SCandidateData)mergeParent.getUserObject();
				loss = computeLoss(mergeChildData, mergeParentData);
				
				if (loss == 0.0) {
					// zero loss: prefer the merge yielding the least probable parent
					double parentP = (double) (mergeParentData.getDCard() + mergeChildData.getDCard()) / totalCard;
					if (parentP < zeroMinLossData.getMinProbability()) {
						zeroMinLossData.setChildCandNode(childCand);
						zeroMinLossData.setMergeChild(mergeChild);
						zeroMinLossData.setMergeParent(mergeParent);
						zeroMinLossData.setMinloss(loss);
						zeroMinLossData.setMinProbability(parentP);
					}
				}
				else if ( loss < minLossData.getMinloss()) {
					// populate minLossData with new data
					minLossData.setChildCandNode(childCand);
					minLossData.setMergeChild(mergeChild);
					minLossData.setMergeParent(mergeParent);
					minLossData.setMinloss(loss);
				}
			}
		}	
		
		if (zeroMinLossData.getMinProbability() != Double.POSITIVE_INFINITY) 
			return zeroMinLossData;
		else return minLossData; 
	}

	/**
	 * Executes the selected merge: folds the merged child's direct cardinality
	 * into the parent, re-parents its children to the parent, and removes it
	 * from the candidate tree.
	 *
	 * @param minLossData the merge chosen by {@link #findMinLossMerge}
	 */
	public void merge (SMinLossData minLossData) {
		DefaultMutableTreeNode mergeChild = minLossData.getMergeChild();
		DefaultMutableTreeNode mergeParent = minLossData.getMergeParent();
	
		// (1) update mergeParent's direct cardinality with the merged child's
		SCandidateData mergeParentData = (SCandidateData) mergeParent.getUserObject();
		mergeParentData.setDCard(mergeParentData.getDCard() + 
				((SCandidateData)mergeChild.getUserObject()).getDCard());
		
		// (2) make children of mergeChild children of mergeParent;
		// add() detaches each grandchild from mergeChild first (one node, one
		// parent), which is what makes this loop terminate
		while (mergeChild.getChildCount() > 0)
		{
			DefaultMutableTreeNode grandchildCand = (DefaultMutableTreeNode)mergeChild.getFirstChild();
			mergeParent.add(grandchildCand);
		}
		// (3) remove mergeChild from the candidate tree
		mergeParentData.setSubCandsCount(mergeParentData.getSubCandsCount() - 1);
		mergeChild.removeFromParent();
	}

	/**
	 * @param d the value to take the logarithm of
	 * @return the base-2 logarithm of {@code d}
	 */
	public double log2(double d) {
		return Math.log(d)/Math.log(2.0);
	}

	/**
	 * Computes the information loss of merging a candidate into an ancestor
	 * candidate: -P(parent') * P(child|parent') * log2(P(child|parent')),
	 * where parent' is the merged concept and probabilities are taken relative
	 * to {@link #totalCard}.
	 *
	 * @param mergeChildCandData    the candidate being merged away
	 * @param mergeAncestorCandData the candidate absorbing the child
	 * @return the loss; 0.0 when either side has zero direct cardinality
	 */
	public double computeLoss(SCandidateData mergeChildCandData, 
			SCandidateData mergeAncestorCandData) {
		double loss = 0.0;

		double newParentP = (double) (mergeAncestorCandData.getDCard() + mergeChildCandData.getDCard()) / totalCard;
		
		double conditionalP;
		if (mergeChildCandData.getDCard() != 0 && mergeAncestorCandData.getDCard() != 0) {
			conditionalP = (double) mergeChildCandData.getDCard()/
				(mergeChildCandData.getDCard() + mergeAncestorCandData.getDCard());
			loss = - newParentP * conditionalP * log2(conditionalP);
		}
		return loss;
	}
}
