/**
 * File: PruneGraphDefaultController.java
 * Created by: mhaimel
 * Created on: Apr 21, 2010
 * CVS:  $Id: PruneGraphDefaultController.java 1.0 Apr 21, 2010 2:35:26 PM mhaimel Exp $
 */
package uk.ac.ebi.curtain.controller.graph;

import java.io.Serializable;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicReference;

import org.apache.commons.math.stat.descriptive.moment.StandardDeviation;

import uk.ac.ebi.curtain.controller.SingleSubmitController;
import uk.ac.ebi.curtain.model.graph.Arc;
import uk.ac.ebi.curtain.model.graph.Graph;
import uk.ac.ebi.curtain.model.graph.GraphAccess.TouchEach;
import uk.ac.ebi.curtain.model.graph.curtain.CategoryReadInfo;
import uk.ac.ebi.curtain.model.graph.curtain.ContigInfo;
import uk.ac.ebi.curtain.model.graph.curtain.CurtainGraph;
import uk.ac.ebi.curtain.model.graph.curtain.ReadWrapper;
import uk.ac.ebi.curtain.model.graph.curtain.filter.ArcMultiplicityMean;
import uk.ac.ebi.curtain.model.graph.curtain.filter.ArcMultiplicityMinumCutoff;
import uk.ac.ebi.curtain.model.graph.curtain.filter.NodeActiveFilter;
import uk.ac.ebi.curtain.model.graph.curtain.filter.NodePartialMatchCutoff;
import uk.ac.ebi.curtain.model.graph.curtain.filter.NodePartialStatistic;
import uk.ac.ebi.curtain.utils.StatisticHelper;

/**
 * @author mhaimel
 *
 */
/**
 * Default graph-pruning controller. Prunes arcs by multiplicity cutoffs and
 * node edges by partial-hit cutoffs on the shared curtain graph, then passes
 * every input element through unchanged.
 *
 * @author mhaimel
 */
public class PruneGraphDefaultController extends SingleSubmitController<Serializable, Serializable> {

	/**
	 * Prunes the curtain graph (arcs first, then node edges based on partial
	 * hits) and forwards each input element unchanged to the next stage.
	 *
	 * @param inputList elements to pass through after pruning
	 */
	@Override
	protected void singleSubmit(List<Serializable> inputList) {
		CurtainGraph curtainGraph = getContext().getGraph();
		Graph<ContigInfo, ReadWrapper> graph = curtainGraph.getGraph();
		Integer categories = curtainGraph.getCategories();
		// reindex before starting
		graph.reindexArcs();
		graph.buildArcsIndex();

		getLog().info("Prune Arcs of Graph ... ");
		pruneArcs(graph, categories);

		getLog().info("Prune Node edges of Graph ... ");
		pruneNodesPartial(graph, categories);

		// pass-through: this controller only mutates the shared graph
		for (Serializable s : inputList) {
			submit(s);
		}
	}

	/**
	 * Removes node edges whose partial-match count exceeds a per-category
	 * maximum cutoff. The cutoff per category is: the configured value if
	 * positive; median + one standard deviation if configured as -1
	 * (automatic); otherwise max+1 (effectively disabled). If no category has
	 * a non-zero setting, filtering is skipped entirely.
	 *
	 * <p>Only a maximum cutoff is applied: a minimum cutoff would leave
	 * nearly no gap and cause loss of data, so only high-partial nodes are
	 * cut off.</p>
	 *
	 * @param graph      the graph to prune (mutated in place)
	 * @param categories number of read categories
	 */
	private void pruneNodesPartial(Graph<ContigInfo, ReadWrapper> graph, Integer categories) {
		getLog().debug("Prune Node edges based on Partial hits... ");
		int[] maxCutoff = new int[categories];
		boolean doFilter = false;
		for (int i = 0; i < categories; ++i) {
			Integer maxPartialCount = getContext().getArguments().getMaxPartialCount(i);
			// any non-zero setting (explicit cutoff, or -1 = automatic) enables filtering
			if (null != maxPartialCount && maxPartialCount != 0) {
				doFilter = true;
			}
		}

		if (!doFilter) {
			getLog().debug("Partial filter DISABLED!");
			return;
		}

		getLog().debug("Partial: Calculate statistics of active nodes ... ");
		NodePartialStatistic partStat = new NodePartialStatistic(categories);
		graph.processNodes(new NodeActiveFilter(true, partStat));
		for (int i = 0; i < categories; ++i) {
			List<Integer> sArr = partStat.getSortedArray(i);
			// NOTE(review): assumes the statistics are non-null, i.e. every
			// category has at least one active node — TODO confirm
			Integer max = StatisticHelper.max(sArr);
			Integer median = StatisticHelper.median(sArr);
			Double std = StatisticHelper.getStandardDeviation(sArr);
			// remove low / high frequent noise to get a nicer distribution
			int cMax = median + std.intValue();
			Integer maxPartialCount = getContext().getArguments().getMaxPartialCount(i);
			if (null != maxPartialCount && maxPartialCount > 0) {
				// explicit user-provided cutoff wins
				maxCutoff[i] = maxPartialCount;
			} else if (null != maxPartialCount && maxPartialCount == -1) {
				// automatic cutoff: median + one standard deviation
				maxCutoff[i] = cMax;
			} else {
				// unset (null) or any other value: effectively no cutoff
				maxCutoff[i] = max + 1;
			}
		}

		getLog().info("Node partial cutoff values: max("  + Arrays.toString(maxCutoff)+");");
		graph.processNodes(new NodePartialMatchCutoff(maxCutoff, true, graph));

		// reindex after pruning
		graph.buildArcsIndex();
		graph.reindexArcs();
	}

	/**
	 * Prunes arcs by per-category minimum multiplicity. A first pass applies
	 * the configured default minimum; the multiplicity means of the surviving
	 * arcs are then used to derive revised per-category cutoffs for a second
	 * pass (explicit per-category configuration takes precedence).
	 *
	 * @param graph      the graph to prune (mutated in place)
	 * @param categories number of read categories
	 */
	private void pruneArcs(Graph<ContigInfo, ReadWrapper> graph, Integer categories) {
		int[] minCutoff = new int[categories];
		Arrays.fill(minCutoff, getContext().getArguments().getMinPairCountDefault());
		// first pass with the default minimum before computing the means
		graph.processArcs(new ArcMultiplicityMinumCutoff(minCutoff));
		ArcMultiplicityMean mean = new ArcMultiplicityMean(categories);
		graph.processArcs(mean);
		double[] meanArr = mean.getMeans();
		getLog().debug("Multiplicity mean values: " + Arrays.toString(meanArr));
		for (int i = 0; i < categories; ++i) {
			Integer minPairCount = getContext().getArguments().getMinPairCount(i);
			if (minPairCount != null) {
				// explicit user-provided cutoff wins
				minCutoff[i] = minPairCount;
			} else {
				// Simple heuristic: half of the average multiplicity as min
				// cutoff, but never below the configured default
				int cOff = (int) Math.round(meanArr[i] / 2);
				minCutoff[i] = Math.max(cOff, minCutoff[i]);
			}
		}
		// rerun with the revised cutoffs
		getLog().info("Multiplicity cutoffs: min("  + Arrays.toString(minCutoff)+");");
		graph.processArcs(new ArcMultiplicityMinumCutoff(minCutoff));

		// reindex after pruning
		graph.buildArcsIndex();
		graph.reindexArcs();
	}

}
