/*
 * ClonePhylogeny.java 
 *
 * Track clone phylogeny of crypts.
 * The tree will be binary and will NOT allow tri-,multi-furcations
 * Tree is built forward in time
 * Clones that go extinct terminate the branch for the clone at a specified time
 * If two mutants happen at the same time, separate them by 0.0000000001 time units
 * although it is highly improbable that the random generator will happen to generate 2 mutations at the same time. 
 */

package sim;
import java.io.*;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Stack;
import java.util.TreeMap;

public class ClonePhylogeny {
    
    /**
     * A single node in the binary clone phylogeny.
     * Internal nodes are speciation events; the left child continues the
     * parent clone and the right child is the new mutant clone.
     */
    static class Node {
        int cloneId;               // clone identifier (fits an int, up to 2,147,483,647)
        float time;                // node time; float rather than double to save memory
        Node left;                 // left child: continuation of this node's clone
        Node right;                // right child: the new mutant clone
        Node parent;               // parent node (the root points at a dummy "mega-root")
        // Locus at which this clone differs from its parent clone; short to save memory.
        // For microsatellite loci, ms_shift_up supplies the extra bit of information:
        // whether the microsat grew (true) or shrank (false) relative to the parent.
        short mutation_difference;
        boolean ms_shift_up;
        float frequency;           // clone frequency; traversals also reuse it as a "visited" flag

        Node(int cloneId, double time, Node parent, int mutated_locus, boolean ms_up) {
            this.cloneId = cloneId;
            this.time = (float) time;
            this.parent = parent;
            this.left = null;
            this.right = null;
            this.mutation_difference = (short) mutated_locus;
            this.ms_shift_up = ms_up;
        }
    }
    
    /**
     * Bundle of per-clone action rates: division, death, neutral mutation
     * and selective mutation. Fields are public and read directly by callers.
     */
    public static class CloneActionRates {
        public Double divisionRate;
        public Double deathRate;
        public Double neutralMutationRate;
        public Double selectiveMutationRate;

        CloneActionRates(Double divisionRate, Double deathRate,
                         Double neutralMutationRate, Double selectiveMutationRate) {
            this.divisionRate = divisionRate;
            this.deathRate = deathRate;
            this.neutralMutationRate = neutralMutationRate;
            this.selectiveMutationRate = selectiveMutationRate;
        }
    }

    // Root of the phylogeny (the wild-type clone, cloneId=0).
    // NOTE(review): all phylogeny state below is static, so only one phylogeny
    // can exist per JVM even though the class has instance methods — confirm intended.
    private static Node root;
    //private static final double RESOLVE_MULTIFURCATION_TIME = 0.000000000001;
    //private static double phylogenyCurrentTime;

    // Holds pointers to the most recent node of clone n
    // obviating the need to do depth-first-search to find the most recent node of clone n
    // Maps clones to nodes in the phylogeny
    private static TreeMap<Integer,Node> cloneNodeMap; 

    // Holds pointers to the current clone and its selection parameters
    private static TreeMap<Integer,CloneActionRates> cloneNodeActionRatesMap; 
  
    /**
     * Creates the phylogeny and sets the root to the wild-type clone
     * (cloneId=0, time=0) with all action rates zero.
     * The root is given a dummy "mega-root" parent so traversals can always
     * compute a branch length as (node.time - parent.time); the mega-root
     * does not point back at the root.
     */
    public ClonePhylogeny(){
        root = new Node(0, 0, null, -1, false);
        // Dummy mega-root (see class comment above).
        root.parent = new Node(0, 0, null, -1, false);
        // Parameterized constructors replace the raw "new TreeMap()";
        // autoboxing replaces the deprecated "new Integer(...)".
        cloneNodeMap = new TreeMap<Integer,Node>();
        cloneNodeMap.put(root.cloneId, root);
        cloneNodeActionRatesMap = new TreeMap<Integer,CloneActionRates>();
        cloneNodeActionRatesMap.put(root.cloneId, new CloneActionRates(0.0, 0.0, 0.0, 0.0));
    }

    /**
     * Adds a new mutant clone to the phylogeny.
     * The parent clone's current node becomes an internal node whose time is
     * dragged forward to splitTime; its left child continues the parent clone
     * and its right child is the new mutant clone. Both children replace their
     * clones' entries in cloneNodeMap.
     *
     * @param cloneId Clone id of the new mutant clone, increasing 1..n during the simulation
     * @param splitTime The time of speciation
     * @param parentCloneId Clone id of the parent clone
     * @param mutated_locus The locus which is hit by a new mutation
     * @param ms_shift_up Whether the microsat length is shifted up (true) or down (false) if the hit locus is a microsat
     * @throws Exception if the parent clone is already extinct (absent from cloneNodeMap)
     */
    public void addNewClone(int cloneId, double splitTime, int parentCloneId, int mutated_locus, boolean ms_shift_up) throws Exception{
        // The map is typed, so the old explicit (Node) cast was redundant.
        Node parentClone = cloneNodeMap.get(parentCloneId);

        // An extinct clone is no longer present in cloneNodeMap.
        if(parentClone == null){
            throw new Exception("Attempt to add a new clone whose parent is an already extinct clone with new clone_id "+cloneId+" and parent clone_id "+parentCloneId);
        }

        // NOTE(review): if parentClone already has children this call is a silent
        // no-op — the header comment forbids multifurcations; confirm that a
        // silent drop (rather than an error) is the intended policy here.
        if(parentClone.left == null){
            // Drag the internal node forward to the split time.
            parentClone.time = (float)splitTime;
            // Left child continues the parent clone (same cloneId, mutation data copied).
            // When reconstructing mutational patterns by rolling up the tree, internal
            // nodes sharing their child's cloneId are ignored; only cloneId changes matter.
            Node leftClone = new Node(parentCloneId, splitTime, parentClone, parentClone.mutation_difference, parentClone.ms_shift_up);
            // Right child is the new clone differing from the parent at mutated_locus;
            // ms_shift_up supplies the extra microsat up/down bit.
            Node rightClone = new Node(cloneId, splitTime, parentClone, mutated_locus, ms_shift_up);
            parentClone.left = leftClone;
            parentClone.right = rightClone;
            // Each clone's map entry now points at its most recent node.
            cloneNodeMap.put(leftClone.cloneId, leftClone);
            cloneNodeMap.put(rightClone.cloneId, rightClone);
        }
    }

    /**
     * Registers the action rates (division, death, neutral mutation,
     * selective mutation) for clone {@code cid}, replacing any previous entry.
     *
     * @param cid clone id
     * @param a division rate
     * @param b death rate
     * @param c neutral mutation rate
     * @param d selective mutation rate
     */
    public void addNewCloneActionRates(int cid, Double a, Double b, Double c, Double d){
        // The parameters are already boxed Doubles; re-wrapping them with the
        // deprecated new Double(...) (and new Integer(...)) was redundant.
        cloneNodeActionRatesMap.put(cid, new CloneActionRates(a, b, c, d));
    }

    /**
     * Appends one CSV line per extant clone to the file destination+".clones":
     * cloneId,divisionRate,deathRate,neutralMutationRate,selectiveMutationRate
     * (rates in scientific notation). Exceptions are caught and logged to
     * stdout, matching the file's existing error-handling style.
     *
     * @param extantClonesList ids of the clones to dump
     * @param destination output file prefix
     */
    public static void outputClonesActionRates(HashSet<Integer> extantClonesList, String destination) throws Exception{	
        NumberFormat decForm = new DecimalFormat("0.###E0");
        PrintWriter output3 = null;
        try{
            output3 = new PrintWriter(new BufferedWriter(new FileWriter(destination+".clones", true)));
            // Typed for-each replaces the raw Iterator + casts.
            for (Integer key : extantClonesList) {
                CloneActionRates car = cloneNodeActionRatesMap.get(key);
                // StringBuilder (unsynchronized) replaces StringBuffer; chained
                // appends avoid the intermediate string concatenations.
                StringBuilder sb = new StringBuilder();
                sb.append(key.intValue()).append(',')
                  .append(decForm.format(car.divisionRate)).append(',')
                  .append(decForm.format(car.deathRate)).append(',')
                  .append(decForm.format(car.neutralMutationRate)).append(',')
                  .append(decForm.format(car.selectiveMutationRate));
                output3.println(sb.toString());
            }
        }
        catch(Exception ex){
            System.out.println( "Exception caught in ClonePhylogeny.outputClonesActionRates(): " + ex.getMessage());
            ex.printStackTrace(System.out);
        }
        finally {
            if (output3 != null) output3.close();
        }
    }



    /**
     * Extends every clone still alive (i.e. present in cloneNodeMap) up to
     * the given termination time by dragging its node's time forward.
     *
     * @param time simulation termination time
     */
    public void terminate(double time){
        // Typed for-each replaces the raw Collection/Iterator + casts.
        for (Node n : cloneNodeMap.values()) {
            n.time = (float) time;
        }
    }

    /**
     * Extends clones to the given termination time, but only those whose id
     * appears in {@code currentList} (the extant clones).
     *
     * @param time simulation termination time
     * @param currentList ids of the extant clones whose times should be dragged
     */
    public void terminate(double time, HashSet<Integer> currentList){
        // Typed for-each replaces the raw Collection/Iterator + casts.
        for (Node n : cloneNodeMap.values()) {
            if (currentList.contains(n.cloneId)) {
                n.time = (float) time;
            }
        }
    }

    /**
     * Marks a clone extinct by removing it from both bookkeeping maps.
     * (The old lookup of the node into an unused local was dead code and
     * has been removed; its time-stamping line was already commented out.)
     *
     * @param cloneId id of the clone to remove
     */
    public static void terminateClone(int cloneId){
        cloneNodeMap.remove(cloneId);
        cloneNodeActionRatesMap.remove(cloneId);
    }


    // Accumulator for the Newick string built by getNewickDFS().
    public static String newickString;

    /**
     * Recursively appends the Newick representation of the subtree rooted at
     * currentNode to newickString. Tips are written as "cloneId:branchLength";
     * internal nodes as "(left,right):branchLength", where branch length is
     * node time minus parent time (formatted to three decimals).
     */
    public static void getNewickDFS(Node currentNode){
        NumberFormat fmt = new DecimalFormat("#0.000");
        boolean isTip = (currentNode.left == null) && (currentNode.right == null);

        if (isTip) {
            double branchLength = currentNode.time - currentNode.parent.time;
            newickString += currentNode.cloneId + ":" + fmt.format(branchLength);
            return;
        }

        // Internal node: emit "(left,right)" then the branch to the parent.
        newickString += "(";
        if (currentNode.left != null) getNewickDFS(currentNode.left);
        newickString += ",";
        if (currentNode.right != null) getNewickDFS(currentNode.right);
        newickString += ")";
        if (currentNode.parent != null) {
            double branchLength = currentNode.time - currentNode.parent.time;
            newickString += ":" + fmt.format(branchLength);
        }
    }

    /** Builds and returns the full phylogeny in Newick format. */
    public static String getNewick(){
        // Reset the accumulator, then walk the whole tree from the root.
        newickString = "";
        getNewickDFS(root);
        return newickString;
    }

    
    /*
    public static void DFS(Node n){
	//System.out.println("Enter DFS with node "+n.cloneId+" mutation at locus "+n.mutation_difference+" = "+n.ms_shift_up);
	//printPattern();
	if(n.left==null){
	    //output pattern for n.cloneId 
	    // n.cloneId, 100, 101, 98, ...
	    int ind = 0;
	    for (Iterator i = _dumpedClonesList.iterator(); i.hasNext();) {
                Integer key = (Integer)i.next();
		if(key==n.cloneId){
		    break; 
		}
		ind++;
	    }
	    for(int i=0; i<_pattern.length; i++){
		_clonePattern[ind][i] = _pattern[i];
	    }
	    //System.out.println("Pattern dumped for node "+n.cloneId);
	    return;
	} else {
	    // Go left
	    //System.out.println("Go Left");
	    DFS(n.left);
	}
	// Go right only if cloneId exists in dumpedList
	if(_dumpedClonesList.contains(n.right.cloneId)){
	    // If it is a microsat
	    if(n.right.mutation_difference<_nmsloci){
		if(n.right.ms_shift_up){
		    _pattern[n.right.mutation_difference] += 1;		
		} else {
		    // ms length can't go below 1, a shift down has no effect no length if ms_length==1
		    if(_pattern[n.right.mutation_difference]>1){
			_pattern[n.right.mutation_difference] -= 1;					   
		    }
		}
	    } else {
		_pattern[n.right.mutation_difference] = 1;
	    }
	    //System.out.println("Go Right");	   
	    DFS(n.right);
	    // After rolling back, we need to restore the mutational pattern
	    // If it is a microsat
	    if(n.right.mutation_difference<_nmsloci){
		if(n.right.ms_shift_up){
		    _pattern[n.right.mutation_difference] -= 1;		
		} else {
		    // ms length can't go below 1, a shift down has no effect no length if ms_length==1
		    if(_pattern[n.right.mutation_difference]>1){
			_pattern[n.right.mutation_difference] += 1;					   
		    }
		}
	    } else {
		_pattern[n.right.mutation_difference] = 0;		
	    }
	}
    }
    */
    /**
     * Iterative post-order traversal of the phylogeny that streams the full
     * mutational pattern of every clone to destination+".patterns" as CSV
     * lines "cloneId,locus0,locus1,..." (patterns are written directly to
     * disk, never held in memory).
     *
     * Node.frequency doubles as the "visited" flag: &lt;0.5 = unvisited,
     * 1.0 = visited; getMSPattern() clears the flags before calling this.
     * The shared _pattern buffer is mutated when descending into a right
     * child (a new mutant clone) and restored when the internal node above
     * it is popped — keeping the buffer equal to the pattern of the clone
     * currently on top of the stack.
     */
    public static void DFSloop(String destination) throws Exception{
	//System.out.println("Enter DFS with node "+n.cloneId+" mutation at locus "+n.mutation_difference+" = "+n.ms_shift_up);
	//printPattern();
	
	// Output patterns directly in file, do not store in memory!!!!
	PrintWriter output4 = null;
	try {
	    output4 = new PrintWriter( new BufferedWriter( new FileWriter(destination+".patterns",true) ) );	    

	    Stack stack = new Stack();
	    stack.push(root);
	    Node n;
	    while(!stack.empty()){
		n = (Node)stack.peek();
		
		// If we reached a leaf node, either to the left or to the right of its parent
		// when n.left is null , n.right is always null as well
		if(n.left==null){
		    // If I am popping a left child or I am popping a right child
		    if(n.cloneId==n.parent.cloneId){
			// I am popping a left child
			// Print pattern if I have reached the leftmost "leaf" of the tree
			//if(n.left==null){
			    // Print pattern
			    // Economize on space
			    StringBuffer sb = new StringBuffer();
			    sb.append(n.cloneId);
			    for(int i=0;i<_totloci;i++){
				sb.append(","+_pattern[i]);
			    }
			    output4.println(sb.toString());      
			    //}
			    //else {
			    // Do not print pattern, Simply go up to the parent
			    //}
			// In both cases there is no mutation change from parent to child
		    }
		    else{
			// I am popping a right child
			//if(n.right==null){
			    // print pattern
			    StringBuffer sb = new StringBuffer();
			    sb.append(n.cloneId);
			    for(int i=0;i<_totloci;i++){
				sb.append(","+_pattern[i]);
			    }
			    output4.println(sb.toString());      
			    //}
			    //else{
			    // Simply go up to the parent
			    //}
			// In both cases erase the right mutation
			// If mutation is a microsatellite
			// NOTE(review): the restore below is commented out on purpose —
			// the pattern is restored when the PARENT internal node is popped
			// (see the "both children visited" branch further down).
			    /*
			if(n.mutation_difference<_nmsloci){
			    if(n.ms_shift_up){
				// Restore the length of the microsatellite, subtract one.
				_pattern[n.mutation_difference] -= 1;		
			    } else {
				// ms length can't go below 1, a shift down has no effect no length if ms_length==1
				if(_pattern[n.mutation_difference]>0){
				    _pattern[n.mutation_difference] += 1;					   
				}
			    }
			} else {
			    // If mutation is a selective locus, restore back
			    _pattern[n.mutation_difference] = 0;		
			}
			    */
			
		    }
		    // Both children nodes are null, this is a leaf node, so pop it from the stack
		    n = (Node)stack.pop();
		    // Mark n as visited
		    n.frequency = 1.0f;
		} 
		else
		    
		    // If left node frequency is 1.0, this means it is visited.
		    // The frequency field in the node data structure is used
		    // as a "visited" field.
		    if(n.left.frequency<0.5){
			stack.push(n.left);
		    } 
		    else	       
			if(n.right.frequency<0.5){
			    stack.push(n.right);
			    // Add the mutation difference when going right
			    // (the right child is the new mutant clone).
			    if(n.right.mutation_difference<_nmsloci){
				if(n.right.ms_shift_up){
				    _pattern[n.right.mutation_difference] += 1;		
				} else {
				    // ms length can't go below 1, a shift down has no effect no length if ms_length==1
				    if(_pattern[n.right.mutation_difference]>0){
					_pattern[n.right.mutation_difference] -= 1;					   
				    }
				}
			    } else {
				_pattern[n.right.mutation_difference] = 1;
			    }		    
			}
			else {
			    // Both left and right children are visited, pop the node from stack and mark it visited
			    // Popping an internal node does require(!) a subtraction of the mutation difference!
			    // restore the pattern from the right node mutation difference
			    if(n.right.mutation_difference<_nmsloci){
				if(n.right.ms_shift_up){
				    // Restore the length of the microsatellite, subtract one.
				    _pattern[n.right.mutation_difference] -= 1;		
				} else {
				    // ms length can't go below 1, a shift down has no effect no length if ms_length==1
				    if(_pattern[n.right.mutation_difference]>0){
					_pattern[n.right.mutation_difference] += 1;					   
				    }
				}
			    } else {
				// If mutation is a selective locus, restore back
				_pattern[n.right.mutation_difference] = 0;		
			    }
			    
			    n = (Node)stack.pop();
			    // Mark n as visited
			    n.frequency = 1.0f;			
			}	    	    
	    }
	}
	catch(Exception ex){
            System.out.println( "Exception caught in ClonePhylogeny.dfsloop(): " + ex.getMessage());
            ex.printStackTrace(System.out);
        }
	finally {
            //if (output2 != null) output2.close();
            if (output4 != null) output4.close();                                                                                            
        }
	
    }
    

    //private static int[][] _clonePattern;
    // Number of microsatellite loci; loci with index < _nmsloci are microsats.
    private static int _nmsloci;
    // Total number of loci tracked per mutational pattern.
    private static int _totloci;

    //private static HashSet<Integer> _dumpedClonesList;
    //private static HashMap<String,Integer> _patterns;
    // Shared scratch buffer (one slot per locus) used by getMSPattern()/DFSloop().
    private static int[] _pattern;
    /**
     * Writes the full mutational pattern of every extant clone to the file
     * destination+".patterns" via DFSloop().
     *
     * @param nmsloci number of microsatellite loci (indices 0..nmsloci-1)
     * @param totloci total number of loci per pattern
     * @param destination output file prefix
     * @throws Exception propagated from DFSloop()
     */
    public static void getMSPattern(int nmsloci, int totloci, String destination) throws Exception{
        _nmsloci = nmsloci;
        _totloci = totloci;
        // Java zero-fills new int arrays, so the old explicit init loop —
        // whose two branches both assigned 0 — was redundant and is removed.
        // Microsats start at length 0; every up-shift is +1, giving 0,1,2,...
        _pattern = new int[totloci];
        // The iterative post-order traversal in DFSloop() uses Node.frequency
        // as a visited flag (0.0 = not visited, 1.0 = visited), so clear it on
        // every extant clone's node and on all of its ancestors.
        for (Node n : cloneNodeMap.values()) {
            n.frequency = 0.0f;
            for (Node h = n; h.parent != null; h = h.parent) {
                h.parent.frequency = 0.0f;
            }
        }
        DFSloop(destination);
    }
    
    /** Debug helper: prints the current pattern buffer as comma-terminated values. */
    private static void printPattern(){
        StringBuilder line = new StringBuilder();
        for (int value : _pattern) {
            line.append(value).append(',');
        }
        System.out.println(line);
    }
    
    

    /** Search back all nodes to the root to figure out whether a locus is
     * mutated, since only mutation differences are stored at each node.
     * @param locus The position/locus to be checked if mutated or not
     * @param clone_id The clone_id to start the traceback search
     * @return true if any node on the path to the root mutated this locus
     */
    public boolean isLocusMutated(int locus, int clone_id){
        // The typed map makes the old (Node) cast redundant; returning on the
        // first hit replaces the flag-and-break idiom.
        for (Node h = cloneNodeMap.get(clone_id); h != null; h = h.parent) {
            if (h.mutation_difference == locus) {
                return true;
            }
        }
        return false;
    }

    /** Search back all nodes to the root to figure out which loci in a range
     * are mutated, since only mutation differences are stored at each node.
     * @param clone_id The clone_id to start the traceback search
     * @param startIndex from locus index (inclusive)
     * @param endIndex to locus index (inclusive)
     * @return a String holding the pattern of mutated selective loci of the clone e.g. "001101101"
     */
    public String getMutatedLociPattern(int clone_id, int startIndex, int endIndex){
        // Arrays.fill replaces the manual default-initialization loop.
        char[] res = new char[endIndex - startIndex + 1];
        Arrays.fill(res, '0');
        for (Node h = cloneNodeMap.get(clone_id); h != null; h = h.parent) {
            if (h.mutation_difference >= startIndex && h.mutation_difference <= endIndex) {
                res[h.mutation_difference - startIndex] = '1';
            }
        }
        return new String(res);
    }



    // Maps a locus index to its order index (0..N-1) within the reproduction-selective loci.
    private static HashMap<Integer,Integer> reproductionLocusIndexToOrderIndex;
    // Maps a locus index to its order index within the survival-selective loci.
    private static HashMap<Integer,Integer> survivalLocusIndexToOrderIndex;
    // Mutation is tied to division therefore pattern includes both repr and mut loci
    private static HashMap<Integer,Integer> reproductionMutationLocusIndexToOrderIndex;
    // Size of the patterns will be pre-computed
    // NOTE(review): these shared scratch buffers make the pattern getters
    // below non-reentrant/not thread-safe — confirm single-threaded use.
    private static char[] reproductionPattern;
    private static char[] survivalPattern;
    private static char[] reproductionMutationPattern;

    /**
     * Stores a defensive copy of the reproduction locus-to-order map and
     * pre-sizes the matching scratch pattern buffer.
     *
     * @param input map of locus index to order index for reproduction-selective loci
     */
    public void setReproductionLociHashMap(HashMap<Integer,Integer> input){
        // Copy-construct so later changes to the caller's map don't leak in.
        reproductionLocusIndexToOrderIndex = new HashMap<Integer,Integer>(input);
        reproductionPattern = new char[input.size()];
    }

    /**
     * Stores a defensive copy of the survival locus-to-order map and
     * pre-sizes the matching scratch pattern buffer.
     *
     * @param input map of locus index to order index for survival-selective loci
     */
    public void setSurvivalLociHashMap(HashMap<Integer,Integer> input){
        // Copy-construct so later changes to the caller's map don't leak in.
        survivalLocusIndexToOrderIndex = new HashMap<Integer,Integer>(input);
        survivalPattern = new char[input.size()];
    }

    /**
     * Stores a defensive copy of the reproduction+mutation locus-to-order map
     * and pre-sizes the matching scratch pattern buffer (mutation is tied to
     * division, so this pattern spans both locus classes).
     *
     * @param input map of locus index to order index
     */
    public void setReproductionMutationLociHashMap(HashMap<Integer,Integer> input){
        // Copy-construct so later changes to the caller's map don't leak in.
        reproductionMutationLocusIndexToOrderIndex = new HashMap<Integer,Integer>(input);
        reproductionMutationPattern = new char[input.size()];
    }

    /** Search back all nodes to the root to figure out which reproduction
     * loci are mutated, since only mutation differences are stored per node.
     * Uses the shared reproductionPattern buffer (not thread-safe).
     * @param clone_id The clone_id to start the traceback search
     * @return a String holding the pattern of mutated selective loci of the clone e.g. "001101101"
     */
    public String getReproductionLociPattern(int clone_id){
        // Arrays.fill replaces the manual default-initialization loop.
        Arrays.fill(reproductionPattern, '0');
        for (Node h = cloneNodeMap.get(clone_id); h != null; h = h.parent) {
            // orderIndex is 0..N-1, or null if the locus has s=0 for this selective class.
            // Integer.valueOf widens the short key to int before boxing — looking
            // up with a boxed Short would never match the Integer keys.
            Integer orderIndex = reproductionLocusIndexToOrderIndex.get(Integer.valueOf(h.mutation_difference));
            if (orderIndex != null) {
                reproductionPattern[orderIndex] = '1';
            }
        }
        return new String(reproductionPattern);
    }

    /** Search back all nodes to the root to figure out which survival loci
     * are mutated. Uses the shared survivalPattern buffer (not thread-safe).
     * @param clone_id The clone_id to start the traceback search
     * @return pattern string of mutated survival loci, e.g. "0100", or "" if none exist
     */
    public String getSurvivalLociPattern(int clone_id){
        // Speed up: nothing to report when no survival loci are configured.
        if (survivalPattern.length == 0) {
            return "";
        }
        // Arrays.fill replaces the manual default-initialization loop.
        Arrays.fill(survivalPattern, '0');
        for (Node h = cloneNodeMap.get(clone_id); h != null; h = h.parent) {
            // orderIndex is 0..N-1, or null if the locus has s=0 for this selective class.
            // Integer.valueOf widens the short key to int before boxing.
            Integer orderIndex = survivalLocusIndexToOrderIndex.get(Integer.valueOf(h.mutation_difference));
            if (orderIndex != null) {
                survivalPattern[orderIndex] = '1';
            }
        }
        return new String(survivalPattern);
    }

    /** Search back all nodes to the root to figure out which reproduction+mutation
     * loci are mutated. Uses the shared reproductionMutationPattern buffer (not thread-safe).
     * @param clone_id The clone_id to start the traceback search
     * @return pattern string of mutated loci, e.g. "001101101"
     */
    public String getReproductionMutationLociPattern(int clone_id){
        // Arrays.fill replaces the manual default-initialization loop.
        Arrays.fill(reproductionMutationPattern, '0');
        for (Node h = cloneNodeMap.get(clone_id); h != null; h = h.parent) {
            // orderIndex is 0..N-1, or null if the locus has s=0 for this selective class.
            // Integer.valueOf widens the short key to int before boxing.
            Integer orderIndex = reproductionMutationLocusIndexToOrderIndex.get(Integer.valueOf(h.mutation_difference));
            if (orderIndex != null) {
                reproductionMutationPattern[orderIndex] = '1';
            }
        }
        return new String(reproductionMutationPattern);
    }

    /*
    public void addSurvival(int clone_id, double s){
	CloneSelectionParameters n = (CloneSelectionParameters)cloneNodeSelectionMap.get(clone_id);
	n.survival *= s;
	cloneNodeSelectionMap.put(clone_id,n);
    }
    public void addReproduction(int clone_id, double s){
	CloneSelectionParameters n = (CloneSelectionParameters)cloneNodeSelectionMap.get(clone_id);
	n.reproduction *= s; 
	cloneNodeSelectionMap.put(clone_id,n);
    }
    public void addMutation(int clone_id, double s){
	CloneSelectionParameters n = (CloneSelectionParameters)cloneNodeSelectionMap.get(clone_id);
	n.mutation *= s; 
	cloneNodeSelectionMap.put(clone_id,n);
    }

    public double getSurvival(int clone_id){
	return ((CloneSelectionParameters)cloneNodeSelectionMap.get(clone_id)).survival;
    }
    public double getReproduction(int clone_id){
	return ((CloneSelectionParameters)cloneNodeSelectionMap.get(clone_id)).reproduction;
    }
    public double getMutation(int clone_id){
	return ((CloneSelectionParameters)cloneNodeSelectionMap.get(clone_id)).mutation;
	}*/

    /** Get the locus last mutated for a clone.
     *  Useful when discriminating selective from neutral fixation events.
     * @param clone_id The id of the clone
     * @return Returns locus last mutated
     */
    public short getCloneLastMutatedLocus(int clone_id){
        // NOTE(review): throws NullPointerException for an extinct/unknown
        // clone_id — confirm callers guarantee the clone is extant.
        return cloneNodeMap.get(clone_id).mutation_difference;
    }

    /** Check if a clone has fixed. Adds up children's frequencies to calculate
     * each ancestor's frequency.
     * @param freqMap A hashmap containing &lt;clone_id, crypt_count&gt;
     * @param size The number of crypts
     * @return A HashMap of &lt;clone_id, frequency&gt; covering the extant clones
     *         and all of their ancestors (the original javadoc's "true/false"
     *         was wrong — frequencies are returned)
     */
    public HashMap<Integer,Float> checkFixation(HashMap<Integer,Integer> freqMap, int size){
        HashMap<Integer,Float> res = new HashMap<Integer,Float>();
        ArrayList<Node> nodes = new ArrayList<Node>();

        // Set each extant clone's node frequency = count / size.
        for (Integer key : freqMap.keySet()) {
            Node n = cloneNodeMap.get(key);
            nodes.add(n);
            n.frequency = freqMap.get(key).floatValue() / size;
        }

        // Zero every ancestor's frequency before accumulating.
        for (Node n : nodes) {
            for (Node h = n; h.parent != null; h = h.parent) {
                h.parent.frequency = 0.0f;
            }
        }

        // Propagate each extant clone's frequency up through all of its ancestors.
        for (Node n : nodes) {
            for (Node h = n; h.parent != null; h = h.parent) {
                h.parent.frequency += n.frequency;
            }
        }

        // Report frequencies of the extant clones and all of their ancestors
        // (autoboxing replaces the deprecated new Integer/new Float wrappers).
        for (Node n : nodes) {
            res.put(n.cloneId, n.frequency);
            Node h = n;
            while (h.parent != null) {
                h = h.parent;
                res.put(h.cloneId, h.frequency);
            }
        }

        return res;
    }

    /**
     * Augments {@code currentList} in place with the cloneIds of all ancestor
     * clones of the clones it contains, and returns it.
     *
     * @param currentList ids of the extant clones (modified in place)
     * @return the same set, now also containing every ancestor cloneId
     */
    public HashSet<Integer> getUpdatedClonesList(HashSet<Integer> currentList){
        HashSet<Integer> res = new HashSet<Integer>();

        // Typed for-each replaces the raw Iterator + casts; ancestors are
        // gathered into res and merged after the loop to avoid mutating
        // currentList while iterating it.
        for (Integer cid : currentList) {
            Node n = cloneNodeMap.get(cid);
            while (n.parent != null) {
                Node p = n.parent;
                // Internal nodes sharing the child's cloneId are not new clones — skip.
                if (p.cloneId != n.cloneId) {
                    if (currentList.contains(p.cloneId)) {
                        // Speedup: an extant ancestor's own ancestors are
                        // (or will be) handled by its own iteration.
                        break;
                    }
                    if (res.contains(p.cloneId)) {
                        // Speedup: res already has this ancestor, hence all
                        // of its ancestors too.
                        break;
                    }
                    res.add(p.cloneId);
                }
                n = p;
            }
        }
        // Merge the newly found ancestor cloneIds into the caller's set.
        currentList.addAll(res);
        return currentList;
    }


    /**
     * Recursively removes every clone in the subtree rooted at n from the
     * bookkeeping maps and cuts the child links so the nodes can be
     * garbage-collected once the caller unlinks n from its parent.
     * (The original's "n = null" assignments only cleared the local
     * reference — dead code — and its leaf/internal branches both ended in
     * terminateClone, so they are merged.)
     */
    public static void delSubtreeDFS(Node n){
        if (n.left != null) {
            delSubtreeDFS(n.left);
            delSubtreeDFS(n.right);
            n.left = null;
            n.right = null;
        }
        terminateClone(n.cloneId);
    }
    
    /**
     * Recursively prunes subtrees whose right child's clone is not in
     * _currentList: the extinct right subtree is deleted via delSubtreeDFS
     * and the now-redundant internal node is spliced out by re-routing the
     * parent &lt;-&gt; left-child pointers.
     * NOTE(review): the "n = null" assignments below only clear the local
     * reference; the actual unlink is the pointer re-routing around them.
     */
    public static void pruneDFS(Node n){
	if(n.right!=null){
	    if(!_currentList.contains(n.right.cloneId)){
		// Remove the reference from parent to right node
		delSubtreeDFS(n.right);
		//n.right = null;
		// The above remove should be enough for the entire subtree to the right to be removed
		// If the internal node is the root, then its parent(mega-root) is special
		// because it does not point to the root node, only the root points to the mega root
		Node newn = n.left;
		if(n.parent.left==null&&n.parent.right==null){
		    // Parent is the childless mega-root: n is the root node.
		    // Re-route left node
		    n.left.parent = n.parent;
		    // Snip off internal node
		    n = null;
		    //delSubtreeDFS(n);
		} else {		    
		    // If the parent node's left child is the internal node
		    // (same cloneId means n continues its parent's clone, i.e. n is a left child)
		    if(n.parent.cloneId==n.cloneId){
			n.parent.left = n.left;
			n.left.parent = n.parent;
			n = null;
			//delSubtreeDFS(n);
		    } else {
			// The parent node's right child is the internal node
			n.parent.right = n.left;
			n.left.parent = n.parent;
			n = null;
			//delSubtreeDFS(n);
		    }
		}
		// Whichever I remove, traversal continues with left child of node n which has been snipped
		pruneDFS(newn);
	    } else {
		// The right node is present in our list, so continue traversing
		pruneDFS(n.left);
		pruneDFS(n.right);
	    }
	}
	// If n has no right child it is a leaf (its left child is null too), so simply return
    }

    // Non-recursive counterpart of delSubtreeDFS.
    // NOTE(review): unlike the recursive delSubtreeDFS, this version never calls
    // terminateClone, so pruned clones are NOT removed from cloneNodeMap /
    // cloneNodeActionRatesMap here — confirm they were already terminated
    // elsewhere before pruning. Also note that assigning null to the local
    // variable n has no effect on reachability; the subtree is only freed
    // once the caller cuts the parent's child pointer.
    public static void delSubtreeDFSLoop(Node n){
	/*
	Stack stack = new Stack();
	// Push root 
	stack.push(n);
	while(!stack.empty()){
	    n = (Node)stack.pop();
	    if(n.left==null){
		terminateClone(n.cloneId);	   
		n = null;
	    } else {
		stack.push(n);
		stack.push(n.right);
		stack.push(n.left);		
	    }
	}
	*/
	// Try to rely on garbage collector to remove the entire subtree?
	// Old...
	//n.left = null;
	//n.right = null;
	//n = null;

	// New iterative marking of subtrees as null
	Stack stack = new Stack();
	// Push children of root 
	if(n.left!=null) stack.push(n.left);
	if(n.right!=null) stack.push(n.right);
	// Mark root for garbage collection
	n = null;
	while(!stack.empty()){
	    n = (Node)stack.pop();
	    if(n.left!=null) stack.push(n.left);
	    if(n.right!=null) stack.push(n.right);
	    n = null;
	}
    }


    /**
     * Iterative version of pruneDFS: walks the tree with an explicit stack,
     * deleting right subtrees whose clones are absent from _currentList and
     * splicing out the now-redundant internal nodes by re-routing the
     * parent &lt;-&gt; left-child pointers.
     * NOTE(review): the "n = null" assignments only clear the local reference;
     * the actual unlink is the pointer re-routing around them.
     */
    public static void pruneDFSLoop(){
	Stack stack = new Stack();
	// Push root 
	stack.push(root);
	Node n;
	// End loop when we visited all nodes
	while(!stack.empty()){
	    // Get node
	    n = (Node)stack.pop();
	    	    	
	    if(n.right!=null){
		if(!_currentList.contains(n.right.cloneId)){
		    // Remove the reference from parent to right node
		    delSubtreeDFSLoop(n.right);
		    //n.right = null;
		    // The above remove should be enough for the entire subtree to the right to be removed
		    // If the internal node is the root, then its parent(mega-root) is special
		    // because it does not point to the root node, only the root points to the mega root
		    Node newn = n.left;
		    if(n.parent.left==null&&n.parent.right==null){
			// Parent is the childless mega-root: n is the root node.
			// Re-route left node
			n.left.parent = n.parent;
			// Snip off internal node
			n = null;
			//delSubtreeDFS(n);
		    } else {		    
			// If the parent node's left child is the internal node
			// (same cloneId means n continues its parent's clone, i.e. n is a left child)
			if(n.parent.cloneId==n.cloneId){
			    n.parent.left = n.left;
			    n.left.parent = n.parent;
			    n = null;
			    //delSubtreeDFS(n);
			} else {
			    // The parent node's right child is the internal node
			    n.parent.right = n.left;
			    n.left.parent = n.parent;
			    n = null;
			    //delSubtreeDFS(n);
			}
		    }
		    // Whichever I remove, traversal continues with left child of node n which has been snipped
		    stack.push(newn);
		} else {
		    // The right node is present in our list, so continue traversing
		    // stack.push(n);
		    stack.push(n.right);
		    stack.push(n.left);
		}
	    }
	}
	// Nodes without a right child are leaves and are simply popped
    }



    // Scratch field holding the set of currently-alive clone ids while a
    // pruning pass runs; written here, read by pruneDFSLoop().
    private static HashSet<Integer> _currentList;

    /** Removes from the phylogeny every subtree rooted at a mutant clone
     *  that is not in the given set of currently existing clones.
     * @param currentList ids of the clones that are still alive; kept (not
     *        copied) for the duration of the pruning pass
     */
    public static void pruneExtinctClonesSubtrees(HashSet<Integer> currentList){
	_currentList = currentList;
	pruneDFSLoop();
    }



    /** Prints the phylogeny only of a selected set of clones, iterative DFS
     * Adapted from BSC5936 Fall 2005 Computational Evolutionary Biology Lecture notes
     * of Fredrik Ronquist
     * @param cloneList A hashset containing the id of clones
     * @return A string representing the phylogeny in newick format
     */
    // Traversal cursor used by getNewickMarkedTips' iterative DFS.
    private static Node p;

    /** Prints the phylogeny only of a selected set of clones, iterative DFS.
     * Adapted from BSC5936 Fall 2005 Computational Evolutionary Biology Lecture notes
     * of Fredrik Ronquist.
     * The Node.frequency field is reused as a traversal counter/visited flag:
     * 0 = reset, 4 = marked (clone in the list), and the counter is bumped on
     * each visit so values 5/6/7 mean "marked node visited 1/2/3 times".
     * @param cloneList A hashset containing the id of clones
     * @return A string representing the phylogeny in newick format
     */
    public static String getNewickMarkedTips(HashSet<Integer> cloneList){

	// Phase 1: reset the visited flag on every clone node in the
	// phylogeny and on all of its ancestors.
	for (Object o : cloneNodeMap.values()) {
	    Node n = (Node)o;
	    n.frequency = 0.0f;
	    for (Node h = n; h.parent != null; h = h.parent) {
		h.parent.frequency = 0.0f;
	    }
	}

	// Phase 2: mark the requested tips, then walk up from each one.
	// Every time the current node is a RIGHT child of its parent
	// (detected by differing cloneIds — left children share the parent's
	// id), the parent is an ancestral clone of a marked tip, so mark it.
	for (Object o : cloneList) {
	    Integer cid = (Integer)o;
	    Node n = (Node)cloneNodeMap.get(cid);
	    // Mark the leaf node having the clone
	    n.frequency = 4.0f;
	    Node h = n;
	    int childId = n.cloneId;
	    while (h.parent != null) {
		if (h.parent.cloneId != childId) {
		    // For efficiency, prune the upward search: if the parent
		    // is already marked, every ancestor above it is too.
		    if (h.parent.frequency == 4.0f) {
			break;
		    }
		    h.parent.frequency = 4.0f;
		}
		h = h.parent;
		childId = h.cloneId;
	    }
	}

	// Phase 3: iterative DFS emitting newick. StringBuilder replaces the
	// original String concatenation (which was O(n^2) in output length).
	p = root;
	StringBuilder newick = new StringBuilder();
	// Root node has a mega-root parent; loop exits once the root has
	// been fully visited and p steps up to the mega-root.
	while (p.parent != null) {
	    p.frequency += 1.0f;
	    // If p is not a tip: counter 1/5 = first visit (go left),
	    // 2/6 = second (go right), 3/7 = third (close and go up).
	    // Only the +4 (marked) variants produce output.
	    if (p.left != null) {
		if (p.frequency == 1.0f || p.frequency == 5.0f) {
		    if (p.frequency == 5.0f) {
			newick.append('(');
		    }
		    p = p.left;
		} else if (p.frequency == 2.0f || p.frequency == 6.0f) {
		    if (p.frequency == 6.0f) {
			newick.append(',');
		    }
		    p = p.right;
		} else if (p.frequency == 3.0f || p.frequency == 7.0f) {
		    if (p.frequency == 7.0f) {
			newick.append(')');
			// Internal nodes may also terminate at the mega-root
			appendBranchLength(newick, p, true);
		    }
		}
	    }
	    // If p is a tip, or a fully-visited internal node, step up
	    if (p.frequency == 3.0f || p.frequency == 7.0f || p.left == null) {
		// Marked tips print their clone id and branch length
		if (p.left == null && p.frequency == 4.0f) {
		    newick.append(p.cloneId);
		    appendBranchLength(newick, p, false);
		}
		p = p.parent;
	    }
	}

	// End semicolon
	newick.append(';');
	return newick.toString();
    }

    /** Appends ":&lt;branch length&gt;" for the given node by walking up to the
     *  nearest marked ancestor (frequency 5/6/7).
     * @param sb output buffer
     * @param node node whose branch length is being emitted
     * @param stopAtMegaRoot when true, the walk also terminates at the
     *        mega-root (parent == null, time 0) — used for internal nodes
     */
    private static void appendBranchLength(StringBuilder sb, Node node, boolean stopAtMegaRoot) {
	float tipTime = node.time;
	Node k = node;
	while (k.parent != null) {
	    k = k.parent;
	    if (k.frequency == 5.0f || k.frequency == 6.0f || k.frequency == 7.0f
		    || (stopAtMegaRoot && k.parent == null)) {
		sb.append(':').append(tipTime - k.time);
		break;
	    }
	}
    }




    /** Debugging helper: walks every clone node currently tracked in the
     *  phylogeny map. The per-clone println is disabled, so only a separator
     *  line is actually printed.
     */
    public void printCurrentClones(){
	for (Object o : cloneNodeMap.values()) {
	    Node n = (Node)o;
	    //System.out.println(n.cloneId+","+n.parent.cloneId+","+n.time+","+n.mutation_difference+","+getSurvival(n.cloneId)+","+getReproduction(n.cloneId));
	}
	System.out.println("--------------");
    }

    /**
     * Main for testing/debugging purposes
     */
    
    
    /**
     * Main for testing/debugging purposes: builds a small four-clone
     * phylogeny, terminates all live branches at time 25, and prints it in
     * newick format — both in full and restricted to a marked subset.
     */
    public static void main(String[] args){
	ClonePhylogeny tree = new ClonePhylogeny();
	try{
	    // Build: clone 1 off 0 at t=5, clone 2 off 0 at t=10,
	    // clone 3 off 1 at t=20, clone 4 off 3 at t=23
	    tree.addNewClone(1,5.0,0,1,false);
	    tree.addNewClone(2,10.0,0,2,false);
	    tree.addNewClone(3,20.0,1,3,false);
	    tree.addNewClone(4,23.0,3,4,false);
	}
	catch (Exception ex){
	    System.out.println( "Exception caught in ClonePhylogeny.main(): " + ex.getMessage());
	    ex.printStackTrace(System.out);
	}
	// Terminate tree at time x
	tree.terminate(25.0);
	System.out.println(getNewick());

	// Typed HashSet replaces the raw type used previously
	HashSet<Integer> marked_clones = new HashSet<Integer>();
	marked_clones.add(0);
	marked_clones.add(1);
	marked_clones.add(3);
	marked_clones.add(4);

	System.out.println(getNewickMarkedTips(marked_clones));
	System.out.println(getNewick());
    }
    

}


