/**
 * File: PruneGraphGenerousController.java
 * Created by: mhaimel
 * Created on: Apr 21, 2010
 * CVS:  $Id: PruneGraphGenerousController.java 1.0 Apr 21, 2010 2:35:26 PM mhaimel Exp $
 */
package uk.ac.ebi.curtain.controller.graph;

import java.io.Serializable;
import java.util.Arrays;
import java.util.List;

import uk.ac.ebi.curtain.controller.SingleSubmitController;
import uk.ac.ebi.curtain.model.graph.Graph;
import uk.ac.ebi.curtain.model.graph.curtain.ContigInfo;
import uk.ac.ebi.curtain.model.graph.curtain.CurtainGraph;
import uk.ac.ebi.curtain.model.graph.curtain.ReadWrapper;
import uk.ac.ebi.curtain.model.graph.curtain.filter.ArcMultiplicityMaximimCutoff;
import uk.ac.ebi.curtain.model.graph.curtain.filter.ArcMultiplicityMean;
import uk.ac.ebi.curtain.model.graph.curtain.filter.ArcMultiplicityMinumCutoff;
import uk.ac.ebi.curtain.model.graph.curtain.filter.NodeActiveFilter;
import uk.ac.ebi.curtain.model.graph.curtain.filter.NodePartialMatchCutoff;
import uk.ac.ebi.curtain.model.graph.curtain.filter.NodePartialStatistic;
import uk.ac.ebi.curtain.utils.StatisticHelper;

/**
 * Prunes the curtain graph with "generous" cutoffs: arcs are filtered by
 * per-category multiplicity (roughly mean/3 .. mean*3), and node edges whose
 * partial-hit count lies above median + one standard deviation are removed.
 *
 * @author mhaimel
 */
public class PruneGraphGenerousController extends SingleSubmitController<Serializable, Serializable> {

	/** Absolute minimum multiplicity cutoff, also used to discard low-frequency noise. */
	private static final int MIN_CUTOFF = 3;
	/** Revised arc minimum cutoff is the per-category multiplicity mean divided by this. */
	private static final int MEAN_MIN_DIVISOR = 3;
	/** Arc maximum cutoff is the per-category multiplicity mean multiplied by this. */
	private static final int MEAN_MAX_FACTOR = 3;

	/**
	 * Prunes the graph held by the controller context in two passes:
	 * first removes arcs with out-of-range multiplicity, then removes
	 * node edges with a high number of partial hits. The arc index is
	 * rebuilt between the passes and re-indexed at the end.
	 *
	 * @param inputList ignored; pruning operates solely on the context graph
	 */
	@Override
	protected void singleSubmit(List<Serializable> inputList) {
		CurtainGraph curtainGraph = getContext().getGraph();
		Graph<ContigInfo, ReadWrapper> graph = curtainGraph.getGraph();
		Integer categories = curtainGraph.getCategories();
		// reindex before starting
		graph.reindexArcs();
		graph.buildArcsIndex();
		getLog().info("Prune Arcs of Graph ... ");
		pruneArcs(graph, categories);
		// reindex 
		graph.buildArcsIndex();
		graph.reindexArcs();
		getLog().info("Prune Node edges of Graph ... ");
		pruneNodesPartial(graph, categories);
		// reindex 
		graph.reindexArcs();
	}

	/**
	 * Removes node edges whose partial-hit count exceeds a per-category
	 * cutoff of median + one standard deviation, computed over all active
	 * nodes after low-frequency noise (counts at or below {@link #MIN_CUTOFF})
	 * is discarded.
	 * <p>
	 * Only the upper cutoff is applied: a lower cutoff would leave nearly
	 * no gap and cause data loss (see comment below).
	 *
	 * @param graph      graph whose node edges are pruned in place
	 * @param categories number of read categories to evaluate independently
	 */
	private void pruneNodesPartial(Graph<ContigInfo, ReadWrapper> graph, Integer categories) {
		getLog().debug("Prune Node edges based on Partial hits... ");
		int[] maxCutoff = new int[categories];

		getLog().debug("Partial: Calculate statistics of active nodes ... ");
		NodePartialStatistic partStat = new NodePartialStatistic(categories);
		graph.processNodes(new NodeActiveFilter(true, partStat));
		for (int i = 0; i < categories; ++i) {
			// remove low frequent noise to get a nicer distribution
			List<Integer> sArr = StatisticHelper.onlyAbove(MIN_CUTOFF, partStat.getSortedArray(i));
			Integer median = StatisticHelper.median(sArr);
			Double std = StatisticHelper.getStandardDeviation(sArr);
			maxCutoff[i] = median + std.intValue();
		}

		/* min cutoff -> nearly no gap -> loss of data!!!
		 * only cut off high partial nodes 
		 */
		graph.processNodes(new NodePartialMatchCutoff(maxCutoff, true, graph));
	}

	/**
	 * Prunes arcs by multiplicity in two rounds. First a fixed minimum
	 * cutoff of {@link #MIN_CUTOFF} is applied; then per-category cutoffs
	 * are revised from the multiplicity mean of the surviving arcs
	 * (min = mean / {@link #MEAN_MIN_DIVISOR}, never below the default;
	 * max = mean * {@link #MEAN_MAX_FACTOR}) and both are applied.
	 *
	 * @param graph      graph whose arcs are pruned in place
	 * @param categories number of read categories to evaluate independently
	 */
	private void pruneArcs(Graph<ContigInfo, ReadWrapper> graph, Integer categories) {
		int[] minCutoff = new int[categories];
		int[] maxCutoff = new int[categories];
		Arrays.fill(minCutoff, MIN_CUTOFF);
		getLog().debug("Default multiplicity cutoffs: "  + Arrays.toString(minCutoff));
		graph.processArcs(new ArcMultiplicityMinumCutoff(minCutoff));
		// collect per-category multiplicity means over the surviving arcs
		ArcMultiplicityMean mean = new ArcMultiplicityMean(categories);
		graph.processArcs(mean);
		double[] meanArr = mean.getMeans();
		getLog().debug("Multiplicity mean values: " + Arrays.toString(meanArr));
		for (int i = 0; i < categories; ++i) {
			double m = meanArr[i];
			/* revised cutoff - set to a 1/3 of the mean, never below the default */
			int cOff = (int) Math.round(m / MEAN_MIN_DIVISOR);
			if (cOff > minCutoff[i]) {
				minCutoff[i] = cOff;
			}
			/* revised cutoff - triple of the mean */
			maxCutoff[i] = (int) Math.round(m * MEAN_MAX_FACTOR);
		}
		// rerun with the revised cutoffs
		getLog().debug("Revised multiplicity cutoffs: min("  + Arrays.toString(minCutoff)+"); max("+Arrays.toString(maxCutoff)+")");
		graph.processArcs(new ArcMultiplicityMinumCutoff(minCutoff));

		// max cutoff for long insert lengths is not the right way!!!
		// BUT will do for the moment - first implementation
		graph.processArcs(new ArcMultiplicityMaximimCutoff(maxCutoff)); 
	}

}
