package src.percolation;

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.TreeMap;

import src.utils.Utils;

/**
 * Percolation experiment driver: for a range of lattice sizes, estimates the
 * cumulative distribution function (CDF) of the percolation threshold — i.e.
 * the probability f(p, p+dp)dp that a percolating cluster appears — and writes
 * one "CDF&lt;large&gt;.dat" file per lattice size.
 */
public class Pr1_b {

	/**
	 * Entry point. Expects five arguments (see {@link #showUsage()}):
	 * minLarge, maxLarge, increment, trials, eps. On any parse/missing-arg
	 * error, prints usage and exits.
	 *
	 * @param argvs command-line arguments
	 * @throws Exception propagated from file output in Utils.printToFile
	 */
	public static void main(String[] argvs) throws Exception {

		int minLarge;   // minimum lattice size
		int maxLarge;   // maximum lattice size (exclusive)
		int increment;  // step between lattice sizes
		int trials;     // random realizations per probability value

		double eps;     // step between successive probability values

		try {
			// parseInt/parseDouble instead of the deprecated boxing
			// constructors new Integer(...) / new Double(...)
			minLarge = Integer.parseInt(argvs[0]);
			maxLarge = Integer.parseInt(argvs[1]);
			increment = Integer.parseInt(argvs[2]);
			trials = Integer.parseInt(argvs[3]);

			eps = Double.parseDouble(argvs[4]);

		} catch (Exception e) {
			// missing or malformed argument: explain and bail out
			showUsage();
			return;
		}

		Random gen = new Random();

		for (int large = minLarge; large < maxLarge; large += increment) {

			// cumulative distribution function of the percolation threshold
			Map<Double, Double> cDF = getCumulativeDistFunction(gen, large, trials, eps);

			Utils.printToFile(cDF, "CDF" + large + ".dat");
		}
	}

	/**
	 * Estimates the CDF of the percolation threshold over the default window
	 * [0.56, 0.64), chosen to bracket the site-percolation critical
	 * probability of the square lattice (p_c ~ 0.5927).
	 *
	 * @param randomGenerator source of seeds for each lattice realization
	 * @param large lattice linear size
	 * @param trials number of independent realizations per probability
	 * @param eps probability step
	 * @return map probability -&gt; fraction of realizations that percolated
	 */
	public static Map<Double, Double> getCumulativeDistFunction(Random randomGenerator, int large, int trials, double eps) {
		return getCumulativeDistFunction(randomGenerator, large, trials, eps, 0.56, 0.64);
	}

	/**
	 * Estimates the CDF of the percolation threshold over an arbitrary
	 * probability window [minProbability, maxProbability).
	 *
	 * @param randomGenerator source of seeds for each lattice realization
	 * @param large lattice linear size
	 * @param trials number of independent realizations per probability
	 * @param eps probability step
	 * @param minProbability inclusive lower bound of the scanned window
	 * @param maxProbability exclusive upper bound of the scanned window
	 * @return map probability -&gt; fraction of realizations that percolated
	 */
	public static Map<Double, Double> getCumulativeDistFunction(Random randomGenerator, int large, int trials, double eps,
			double minProbability, double maxProbability) {

		Map<Double, Double> cumulativeDF = new TreeMap<>();

		for (double probability = minProbability; probability < maxProbability; probability += eps) {
			int successes = 0;
			for (int i = 0; i < trials; i++) {
				// fresh seed per realization so trials are independent
				long seed = randomGenerator.nextLong();
				Red red = new Red(large, seed);
				red.createGrid(probability);
				if (red.doesPercolate())
					successes++;
			}
			// empirical percolation probability at this occupation probability
			cumulativeDF.put(probability, (double) successes / trials);
		}
		return cumulativeDF;
	}

	/** Prints the command-line synopsis and per-argument help to stdout. */
	private static void showUsage() {
		System.out.println("Usage: ");

		// synopsis now lists all five required arguments, matching main()
		System.out.println("<minLarge> <maxLarge> <increment> <trials> <eps>\n");

		System.out.println("<minLarge>: minimum size of net.");
		System.out.println("<maxLarge>: maximum size of net.");
		System.out.println("<increment>: step between sizes.");
		System.out.println("<trials>: number of trials for statistics on each netSize.");
		System.out.println("<eps>: step between probabilities.\n\n.");
		System.out.println("Several output files will result in the form CDF<large>.dat");

	}

}


