package ca.uwindsor.cs.deepweb.estimation.method.poolbasedcoverage;

import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;

import org.apache.commons.math3.stat.descriptive.moment.Mean;
import org.apache.commons.math3.stat.descriptive.moment.StandardDeviation;

import ca.uwindsor.cs.deepweb.estimation.experiment.service.QueryService;
import ca.uwindsor.cs.deepweb.estimation.method.border.TermWeightEstimator;

public class C3 {

	protected float coverage;

	protected int starting_df;

	protected double starting_weight;

	// NOTE(review): never assigned or read in this class; kept for subclass compatibility.
	protected int samplesize;

	/** Term-selection strategy: pick query terms by estimated weight. */
	protected static final int SELECTED_BY_WEIGHT = 1;

	/** Term-selection strategy: pick query terms by document frequency. */
	protected static final int SELECTED_BY_DF = 2;

	protected int select_by;

	protected String index;

	protected String dict;

	protected String termlistfile;

	protected String sampleindex;

	protected int size;

	protected String selectedtermfile;

	protected String termweightdffile;

	protected int numoftrials;

	protected int corpussize;

	/**
	 * Creates an estimator whose query terms are selected by term weight.
	 *
	 * @param index           path to the full corpus index
	 * @param dict            path to the dictionary file
	 * @param numberofdocs    number of documents to draw into the sample
	 * @param coverage        target pool coverage fraction
	 * @param starting_weight starting term weight for selection
	 * @param termlistfile    file holding the candidate term list
	 * @param sampleindex     path where the sample index is built
	 * @param selectedtermfile output file for the selected terms
	 * @param termweightdffile output file for (term, weight, df) rows
	 * @param numoftrials     number of estimation trials per pool size
	 */
	public C3(String index, String dict, int numberofdocs, float coverage,
			double starting_weight, String termlistfile, String sampleindex,
			String selectedtermfile, String termweightdffile, int numoftrials) {
		this.starting_weight = starting_weight;
		select_by = SELECTED_BY_WEIGHT;
		init(index, dict, numberofdocs, coverage, termlistfile, sampleindex,
				selectedtermfile, termweightdffile, numoftrials);
	}

	/**
	 * Creates an estimator whose query terms are selected by document frequency.
	 *
	 * @param starting_df starting document frequency for term selection
	 * (remaining parameters as in the weight-based constructor)
	 */
	public C3(String index, String dict, int numberofdocs, float coverage,
			int starting_df, String termlistfile, String sampleindex,
			String selectedtermfile, String termweightdffile, int numoftrials) {
		this.starting_df = starting_df;
		select_by = SELECTED_BY_DF;
		init(index, dict, numberofdocs, coverage, termlistfile, sampleindex,
				selectedtermfile, termweightdffile, numoftrials);
	}

	/** Shared constructor body: stores the common configuration fields. */
	protected void init(String index, String dict, int numberofdocs,
			float coverage, String termlistfile, String sampleindex,
			String selectedtermfile, String termweightdffile, int numoftrials) {
		this.coverage = coverage;
		this.index = index;
		this.dict = dict;
		this.termlistfile = termlistfile;
		this.sampleindex = sampleindex;
		this.size = numberofdocs;
		this.selectedtermfile = selectedtermfile;
		this.termweightdffile = termweightdffile;
		this.numoftrials = numoftrials;
	}

	/**
	 * Builds the query-based document sample, selects query terms according to
	 * {@code select_by}, runs the queries, and writes the resulting
	 * (term, weight, df) rows as tab-separated lines to {@code termweightdffile}.
	 *
	 * @throws UnsupportedOperationException if {@code select_by} is
	 *         {@link #SELECTED_BY_WEIGHT} (not implemented here)
	 * @throws IllegalStateException if {@code select_by} holds an unknown value
	 * @throws Exception propagated from sampling, searching, or querying
	 */
	public void build() throws Exception {
		QueryBasedSampleDocuments2 qbsd = new QueryBasedSampleDocuments2(index,
				dict, termlistfile, sampleindex, size);
		qbsd.start();
		corpussize = qbsd.getCorpusSize();

		QueryService qs;
		switch (select_by) {
		case SELECTED_BY_DF:
			WordSearchAndCoverage search = new WordSearchAndCoverage(
					termlistfile, sampleindex, selectedtermfile);
			search.search(coverage);
			qs = new QueryService(index, corpussize, search.getSelectedTerms(),
					corpussize, null, false);
			break;
		case SELECTED_BY_WEIGHT:
			// BUG FIX: this case previously did nothing, leaving qs == null and
			// crashing below with a bare NullPointerException. Fail with a
			// descriptive exception instead.
			throw new UnsupportedOperationException(
					"selection by term weight is not implemented in C3.build()");
		default:
			throw new IllegalStateException("unknown select_by value: " + select_by);
		}

		qs.setOutputDocumentCaptureFrequencyDistribution(false);
		qs.setOutputORmethodResult(false);
		qs.setOutputDocumentFrequency(false);
		qs.setOutputDocumentSize(false);
		qs.setOutputDocumentSizeDistribution(false);
		qs.setOutputTermWeightandNumberofDocument(true);
		qs.setOutputTermWeightDistribution(false);
		qs.setOutputDocumentWeight(false);
		String result = qs.goQuery();
		String[] temp = result.split("\n");

		// Strip the XML wrapper (first 4 and last 2 lines of the query output)
		// and write the remaining rows as tab-separated text.
		// BUG FIX: try-with-resources guarantees the stream is closed even if a
		// write fails; the old code leaked the FileOutputStream on exception.
		try (FileOutputStream fp = new FileOutputStream(termweightdffile)) {
			for (int i = 4; i < temp.length - 2; i++) {
				String row = temp[i].replace(',', '\t').replaceAll("\"", "") + "\n";
				fp.write(row.getBytes("UTF-8"));
			}
		}
	}

	/**
	 * Runs {@code numoftrials} estimation trials for each pool size in {@code t}
	 * and reports, per pool size: the mean df sum ("mean n"), the relative bias
	 * of the estimated corpus size, and the relative standard deviation.
	 *
	 * @param t pool sizes (number of query terms) to evaluate
	 * @return a {@code t.size() x 3} table of
	 *         {mean df sum, relative bias, relative standard deviation}
	 * @throws IOException if {@code termweightdffile} cannot be read
	 */
	public String[][] estimate(ArrayList<Integer> t) throws IOException {
		TermWeightEstimator e = new TermWeightEstimator();
		e.load(termweightdffile);
		StandardDeviation sd = new StandardDeviation();
		Mean mean = new Mean();
		// BUG FIX: size the trial buffer by numoftrials instead of a fixed 100.
		// The old array overflowed when numoftrials > 100 and diluted the
		// mean/sd with trailing zeros when numoftrials < 100.
		double[] e_sizes = new double[numoftrials];
		String[][] result = new String[t.size()][3];

		for (int j = 0; j < t.size(); j++) {
			// BUG FIX: reset per pool size; previously meandfsum accumulated
			// across iterations of j, inflating every row after the first.
			long meandfsum = 0;
			for (int i = 0; i < numoftrials; i++) {
				// Estimated corpus size for this trial: mean term weight times
				// the query pool size.
				e_sizes[i] = e.getMeanWeight(t.get(j)) * e.getQueryPoolSize();
				meandfsum += (e.getDFSum() / numoftrials);
			}
			// BUG FIX: the line maintaining meanvalue was commented out, so the
			// RSD below always divided by zero (yielding Infinity/NaN). Use the
			// mean of the trial estimates as the denominator.
			double meanvalue = mean.evaluate(e_sizes);
			result[j][0] = Long.toString(meandfsum); // mean n (df sum averaged over trials)
			result[j][1] = Double.toString((meanvalue - corpussize)
					/ corpussize); // relative bias (RB)
			result[j][2] = Double.toString(sd.evaluate(e_sizes) / meanvalue); // RSD
		}
		return result;
	}

	/**
	 * Command-line entry point. Expects exactly 10 arguments:
	 * index, dict, numberofdocs, coverage, starting_df, termlistfile,
	 * sampleindex, selectedtermfile, termweightdffile, numoftrials.
	 * Prints the estimation table for pool sizes {10, 50, 100, 500, 1000}.
	 */
	public static void main(String[] args) {
		if (args.length != 10) {
			System.err.println("usage: C3 index dict numberofdocs coverage "
					+ "starting_df termlistfile sampleindex selectedtermfile "
					+ "termweightdffile numoftrials");
			System.exit(-1);
		}
		int starting_df = Integer.parseInt(args[4]);
		C3 c3 = new C3(args[0], args[1], Integer.parseInt(args[2]), Float
				.parseFloat(args[3]), starting_df, args[5], args[6], args[7],
				args[8], Integer.parseInt(args[9]));
		try {
			c3.build();
			ArrayList<Integer> t = new ArrayList<Integer>();
			t.add(10);
			t.add(50);
			t.add(100);
			t.add(500);
			t.add(1000);
			String[][] result = c3.estimate(t);
			for (int i = 0; i < result.length; i++) {
				for (int j = 0; j < result[i].length; j++) {
					System.out.print(result[i][j]);
					System.out.print("\t");
				}
				System.out.print("\n");
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

}
