package ca.uwindsor.cs.deepweb.estimation.method.border;

import java.math.BigInteger;
import java.util.ArrayList;

import org.apache.commons.math3.stat.descriptive.SummaryStatistics;

/**
 * Border-method size-estimation experiment with correlation removed.
 *
 * <p>For each query-pool size, runs {@code args[3]} trials. Each trial estimates
 * |DA| and |DB| from two query pools, estimates the overlap |DAB| with the
 * correlation-removing query runner, and records the capture–recapture estimate
 * |DA| * |DB| / |DAB|. Prints mean, standard deviation, and mean document-frequency
 * sum per pool size.
 *
 * <p>Usage: args[0] = index directory, args[1] = query pool A file (df + weight),
 * args[2] = query pool B file (df + weight), args[3] = number of trials.
 */
public class ExperimentTwoQueryPoolRemoveCorrelationSimplified {

	public static void main(String[] args) {
		if (args.length != 4) {
			System.err.println(
					"Usage: <indexDir> <queryPoolA> <queryPoolB> <trials>");
			System.exit(-1);
		}

		System.out.println("Border method removed correlation:" + args[0]);

		// Query-pool sizes (number of random terms) to evaluate.
		ArrayList<Integer> poolSizes = new ArrayList<Integer>();
		poolSizes.add(100);
		poolSizes.add(500);
		poolSizes.add(1000);
		poolSizes.add(2000);
		poolSizes.add(2500);

		for (int wordsquantity : poolSizes) {
			int times = Integer.parseInt(args[3]);
			// Accumulated DF sum across trials; BigInteger avoids long overflow.
			BigInteger meanDF = BigInteger.ZERO;

			try {
				SummaryStatistics summary = new SummaryStatistics();
				for (int i = 0; i < times; i++) {
					// Estimator for |DA| from query pool A.
					TermWeightEstimatorThread estimatorDA = new TermWeightEstimatorThread();
					estimatorDA.load(args[1]);
					String[] querypoolA = estimatorDA.getQueryPool();

					// Estimator for |DB| from query pool B.
					TermWeightEstimatorThread estimatorDB = new TermWeightEstimatorThread();
					estimatorDB.load(args[2]);
					String[] querypoolB = estimatorDB.getQueryPool();

					estimatorDA.setNumberofRandomTerms(wordsquantity);
					estimatorDA.start();

					estimatorDB.setNumberofRandomTerms(wordsquantity);
					estimatorDB.start();

					// join() instead of polling isAlive() with 1 s sleeps:
					// no latency penalty, and interruption is surfaced cleanly.
					estimatorDA.join();
					String[] wordsA = estimatorDA.getTerms();
					int da = estimatorDA.getEstimatedSize();

					// Overlap estimator |DAB| needs DA's sampled terms, so it
					// can only start after estimatorDA finishes.
					QueryRunnerContainExcactlyOneRemoveCorrelationThread runnertwosets =
							new QueryRunnerContainExcactlyOneRemoveCorrelationThread(
									args[0], querypoolA, querypoolB, wordsA);
					runnertwosets.start();

					estimatorDB.join();
					runnertwosets.join();
					int dab = runnertwosets.getEstimatedSize();
					int db = estimatorDB.getEstimatedSize();

					// Retry the trial when either estimate is zero (division by
					// zero / degenerate sample). Check the factors separately:
					// the original test `dab * db == 0` could wrap on int
					// overflow and misclassify a valid trial.
					if (dab == 0 || db == 0) {
						i--;
						continue;
					}

					// Capture–recapture estimate |DA| * |DB| / |DAB|.
					// BUG FIX: the original `da / dab * db` used integer
					// division, truncating da/dab before multiplying and
					// systematically biasing the estimate. Compute in double.
					summary.addValue((double) da / dab * db);

					long dfsum = estimatorDA.getDFSum() + estimatorDB.getDFSum();
					meanDF = meanDF.add(BigInteger.valueOf(dfsum));
				}

				System.out.println(wordsquantity);
				System.out.println(summary.getMean());
				System.out.println(summary.getStandardDeviation());
				// Mean DF sum over all trials (integer division is intentional).
				System.out.println(meanDF.divide(BigInteger.valueOf(times)));

			} catch (InterruptedException e) {
				// Restore the interrupt flag so callers/JVM shutdown see it.
				Thread.currentThread().interrupt();
				e.printStackTrace();
			} catch (Exception e) {
				e.printStackTrace();
			}
		}
	}

}
