package preprocessing;

import gridmath.CaseMatrixBase;
import gridmath.NumberOfCombinations;

import java.util.Vector;
import pac.SearchDatastructure;
import datastructures.WorkUnit;

/**
 * Greedy optimizer that, for every pair of genome windows, chooses how many
 * random samples to draw at each filter order {@code k} so that the
 * user-requested statistical power is reached at minimal expected cost
 * (number of candidate pairs that must subsequently be tested).
 */
public class ParameterOptimizer {

	// Search configuration: window maker (boundaries, per-window variable counts)
	// and user parameters (requested power).
	private SearchDatastructure searcher;
	// Samples added per optimization step; re-tuned on every raster scan.
	private int sample_increment;
	// Inclusive lower / exclusive upper bound on the filter order k.
	private final int min_k = 1;
	private final int max_k = 40;
	// Cached per-window frequency boundaries; windowBoundaries[w][1] is the
	// maximum frequency of window w.
	private double[][] windowBoundaries;

	/**
	 * @param I search data structure providing the window maker and user parameters;
	 *          must be non-null with an initialized window maker
	 */
	public ParameterOptimizer(SearchDatastructure I) {
		searcher = I;
		sample_increment = 1;
		windowBoundaries = searcher.windowMaker.getWindowBoundaries();
	}

	/**
	 * Builds one optimally parameterized {@link WorkUnit} for every window pair
	 * (i, j) with i &lt;= j, skipping pairs for which no feasible search
	 * parameters exist (zero probabilities or empty windows).
	 *
	 * @param pSignificantMatrix     per-pair probability of sampling a significant hit
	 * @param pLEMatrix              per-pair null-model (linkage-equilibrium) probability
	 * @param exhaustiveCombinations per-pair count of exhaustive candidate combinations
	 * @return the work units for all feasible pairs, in (i, j) scan order
	 */
	public Vector<WorkUnit> optimallyDistributeWork (CaseMatrixBase pSignificantMatrix, CaseMatrixBase pLEMatrix, NumberOfCombinations exhaustiveCombinations) {

		Vector<WorkUnit> distributedWork = new Vector<WorkUnit>();
		// Loop-invariant: number of windows does not change during the scan.
		int noOfWindows = searcher.windowMaker.getNoOfWindows();

		// Crude console progress bar: one dot per outer window.
		System.out.println("|----------------------------------------------------------------------| 100%");
		System.out.print(" ");
		for (int i = 0; i < noOfWindows; i++)
		{
			System.out.print(".");
			for (int j = i; j < noOfWindows; j++)
			{
				double[] maxFreqs = new double[] { windowBoundaries[i][1], windowBoundaries[j][1] };
				int[] numberOfVariables = new int[] { searcher.windowMaker.counter[i], searcher.windowMaker.counter[j] };

				WorkUnit optimalWorkUnit2D = findOptimalSearchParameters (
						pSignificantMatrix.getMatrix2D()[i][j], pLEMatrix.getMatrix2D()[i][j],
						maxFreqs, numberOfVariables, (long) exhaustiveCombinations.getMatrix2D()[i][j],
						new int[]{i,j} );

				// null marks an infeasible pair: nothing to search there.
				if ( optimalWorkUnit2D != null )
					distributedWork.add( optimalWorkUnit2D );
			}
		}

		return distributedWork;

	}



	/**
	 * Expected number of candidate pairs produced by {@code trial_increments}
	 * samples at filter order {@code k}: each window contributes
	 * {@code n_w * maxFreq_w^k} expected surviving variables per trial.
	 *
	 * @param k                 filter order (pattern length)
	 * @param trial_increments  number of sampling trials
	 * @param maxFrequencies    max frequency per window, {0, 1} indexed
	 * @param numberOfVariables variable count per window, {0, 1} indexed
	 * @return expected candidates generated by the given number of trials
	 */
	public double getCandidatesPerSampleIncrement(int k, int trial_increments, double[] maxFrequencies, int[] numberOfVariables)
	{

		double vars_from_window1 = numberOfVariables[0]*Math.pow(maxFrequencies[0],k);
		double vars_from_window2 = numberOfVariables[1]*Math.pow(maxFrequencies[1],k) ;
		double candidates_per_trial_increment = (double) trial_increments * vars_from_window1 * vars_from_window2 ;
		return candidates_per_trial_increment;
	}



	/**
	 * Minimizes the objective: greedily allocates sampling trials across filter
	 * orders k in [min_k, max_k), at each step choosing the k with the largest
	 * marginal power gain per unit of work ("bang per buck"), until the
	 * accumulated power reaches the user-requested power.
	 *
	 * @param pSignificance          probability that one sample hits the significant pair
	 * @param pNull                  null-model probability; zero marks an infeasible pair
	 *                               (only used for that feasibility check here)
	 * @param maxFrequencies         max frequency of each of the two windows
	 * @param numberOfVariables      variable count of each of the two windows
	 * @param exhaustiveCombinations number of combinations a brute-force scan would test
	 * @param coordinates            the (i, j) window coordinates of this pair
	 * @return the optimized work unit, or null when the pair is infeasible
	 */
	public WorkUnit findOptimalSearchParameters ( double pSignificance, double pNull, double[] maxFrequencies,
																						int[] numberOfVariables, long exhaustiveCombinations,
																						int[] coordinates )
	{
		// Infeasible pair: nothing to find, or nothing to sample from.
		if ( pSignificance == 0 || pNull == 0 )
			return null;

		for(int var=0; var<maxFrequencies.length; var++) {
			if ( maxFrequencies[var] == 0 || numberOfVariables[var] == 0 )
				return null;
		}

		double total_power = 0;
		double worstCaseCombinations = 0;
		int[] samples_per_k = new int[max_k]; //default 0
		double [] power_per_k = new double [max_k]; //again, default 0;

		//brute force would have given you, on average, how much power per lookup?
		//(you would only have to look at a fraction of the exhaustive pairs)
		double required_power = searcher.userParams.getPower() ;

		//repeat till you achieve desired power
		while ( total_power < required_power )
		{
			int best_k = -1;
			double best_power = 0;
			double best_candidates = 0;
			double best_sample_increment = 0;
			double best_bang_per_buck = 0;

			//Do the raster scan to find k with the steepest slope (power gains) for the current setup.
			for(int this_k=min_k; this_k<max_k; this_k++)
			{
				//reset increment, and compute the power for 1 trial at this k.
				sample_increment = 1;
				double power_increment_k = 1 - Math.pow( (1-Math.pow(pSignificance,this_k)), (samples_per_k[this_k]+sample_increment) )
																	- power_per_k[this_k];

				//Now, this is a fine-balance. you don't want to go down too deep, because lots of sampling is expensive on large genomes.
				//For large GWAS datasets, between 500K to 1M SNPs on 1000s of individuals, we found these values to be the most useful.

				//Negligible gains take forever to add up: skip this k entirely.
				if  ( power_increment_k < 1e-8)
					continue;

				//Small gains are evaluated at a coarser granularity - or the scan takes too long.
				else if ( power_increment_k < 5e-8 )
					sample_increment = 25000;
				else if ( power_increment_k < 1e-7 )
					sample_increment = 5000;
				else if ( power_increment_k < 5e-7 )
					sample_increment = 1000;
				else if ( power_increment_k < 1e-6 )
					sample_increment = 250;
				else if  ( power_increment_k < 5e-6 )
					sample_increment = 50;

				//recompute the marginal power for the chosen (possibly coarser) increment.
				power_increment_k = 1 - Math.pow( (1-Math.pow(pSignificance,this_k)), (samples_per_k[this_k]+sample_increment) )
															- power_per_k[this_k];

				//calculate the appropriate ROI
				double candidates_per_sample_increment = getCandidatesPerSampleIncrement ( this_k, sample_increment, maxFrequencies, numberOfVariables ) ;

				//how many power points are you getting per candidate?
				double bang_per_buck = power_increment_k / ( candidates_per_sample_increment + 0.1*sample_increment );
																			//Double Damage: you pay because you have to perform the samples, and then in addition, test the candidates
																			//The 0.1*sample_increment term also prevents a low denominator from hijacking the payoff.

				//update your best k, if this is better than anything seen so far.
				if ( bang_per_buck > best_bang_per_buck )
				{
					best_k = this_k;
					best_power = power_increment_k;
					best_candidates = candidates_per_sample_increment;
					best_sample_increment = sample_increment;
					best_bang_per_buck = bang_per_buck;
				}
			}

			//BUG FIX: if no k offered a measurable gain (all increments < 1e-8),
			//the original indexed samples_per_k[-1] and crashed - or, had it not,
			//would have looped forever. Stop with the power achieved so far.
			if ( best_k < 0 )
				break;

			//if just one k gives you all you need, then flush the others clean.
			if( best_power >= required_power ) {
				for(int i=0; i<samples_per_k.length; i++) {
					if (i==best_k)
						continue;
					//else
					samples_per_k[i] = 0;
					power_per_k[i] = 0;
				}
				//restart the accumulators from the winning k only.
				worstCaseCombinations = 0;
				total_power = 0;
			}

			samples_per_k[best_k] += best_sample_increment;
			power_per_k[best_k] += best_power;
			worstCaseCombinations += best_candidates;

			total_power += best_power;
		}

		return 	new WorkUnit ( samples_per_k, power_per_k, coordinates, exhaustiveCombinations, worstCaseCombinations);

	}



}
