package ca.uwindsor.cs.deepweb.estimation.method.border;

import java.io.FileOutputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Set;

import org.apache.commons.math3.stat.descriptive.moment.StandardDeviation;
import org.apache.poi.hssf.usermodel.HSSFCell;
import org.apache.poi.hssf.usermodel.HSSFRow;
import org.apache.poi.hssf.usermodel.HSSFSheet;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;

import ca.uwindsor.cs.deepweb.estimation.RandomWordSelector;

public class ExperimentTwoQueryPool {

	/**
	 * Runs repeated two-pool (capture-recapture style) corpus-size estimations
	 * and writes per-trial details to .xls workbooks plus a summary text file.
	 *
	 * <p>Per trial: DA is estimated from random phrases drawn from pool A, DB
	 * from pool B, and DAB from pool-A queries filtered by pool B; the trial's
	 * size estimate is N = DA / DAB * DB. Trials where DAB &gt;= DB are
	 * discarded and redone.
	 *
	 * @param args [0] index directory, [1] query pool A file, [2] query pool B
	 *             file, [3] number of random queries per trial, [4] number of
	 *             trials, [5] directory to save xls files, [6] filename prefix
	 */
	public static void main(String[] args) {
		if (args.length != 7) {
			System.err.println("Usage: ExperimentTwoQueryPool <indexDir> "
					+ "<poolAFile> <poolBFile> <numQueries> <numTrials> "
					+ "<outDir> <prefix>");
			System.exit(-1);
		}

		int wordsquantity = Integer.parseInt(args[3]);
		ArrayList<String> wordsA = new ArrayList<String>(wordsquantity);
		ArrayList<String> wordsB = new ArrayList<String>(wordsquantity);

		int times = Integer.parseInt(args[4]);
		// Must be double: the original int accumulator truncated every
		// per-trial estimate and used integer division for the mean.
		double meanvalue = 0;
		StandardDeviation sd = new StandardDeviation();

		double[] e_sizes = new double[times];
		int dab, da, db;

		try {
			for (int i = 0; i < times; i++) {
				wordsA.clear();
				wordsB.clear();

				// In-memory Excel workbook holding this trial's details.
				HSSFWorkbook wb = new HSSFWorkbook();

				// Estimate DA: draw random phrases from pool A and run them.
				RandomWordSelector selector = new RandomWordSelector(
						args[1], "UTF-8");
				ArrayList<String> querypoolA = selector.getWordList();

				for (int j = 0; j < wordsquantity; j++) {
					wordsA.add(selector.getRandomPhrase());
				}
				QueryRunner runnerA = new QueryRunner(args[0], querypoolA,
						wordsA);
				da = runnerA.esitmate();
				Set<Integer> dauniqueids = new HashSet<Integer>(runnerA
						.getUniqueDocumentIDs());
				// Sheet "DA": one row per query, word in col 0, weight in col 1.
				HSSFSheet sheetDA = wb.createSheet("DA");
				for (int j = 0; j < wordsA.size(); j++) {
					HSSFRow row = sheetDA.createRow(j);
					row.createCell(0).setCellValue(wordsA.get(j));
					row.createCell(1).setCellValue(runnerA.getWeightofQueries()[j]);
				}

				// Estimate DB: same procedure against pool B.
				selector = new RandomWordSelector(
						args[2], "UTF-8");
				ArrayList<String> querypoolB = selector.getWordList();
				for (int j = 0; j < wordsquantity; j++) {
					wordsB.add(selector.getRandomPhrase());
				}
				QueryRunner runnerB = new QueryRunner(args[0], querypoolB,
						wordsB);
				db = runnerB.esitmate();
				Set<Integer> dbuniqueids = new HashSet<Integer>(runnerB
						.getUniqueDocumentIDs());
				// Sheet "DB": same layout as sheet "DA".
				HSSFSheet sheetDB = wb.createSheet("DB");
				for (int j = 0; j < wordsB.size(); j++) {
					HSSFRow row = sheetDB.createRow(j);
					row.createCell(0).setCellValue(wordsB.get(j));
					row.createCell(1).setCellValue(runnerB.getWeightofQueries()[j]);
				}

				// Estimate DAB: pool-A queries filtered by pool B (overlap).
				QueryRunnerSetAFilterSetB runnertwosets = new QueryRunnerSetAFilterSetB(
						args[0], querypoolA, querypoolB, wordsA);
				dab = runnertwosets.esitmate();

				// Discard degenerate trials where the overlap estimate is not
				// smaller than DB, and redo them.
				// NOTE(review): if dab >= db persistently, this loops forever;
				// consider adding a retry cap.
				if (dab >= db) {
					i--;
					continue;
				}

				Set<Integer> dabuniqueids = new HashSet<Integer>(runnertwosets
						.getUniqueDocumentIDs());

				// Sheet "DAB": pool-A words with their filtered-run weights.
				HSSFSheet sheetDAB = wb.createSheet("DAB");
				for (int j = 0; j < wordsA.size(); j++) {
					HSSFRow row = sheetDAB.createRow(j);
					row.createCell(0).setCellValue(wordsA.get(j));
					row.createCell(1)
							.setCellValue(runnertwosets.getWeightofQueries()[j]);
				}

				// Sheet "Estimation": summary rows for DA, DB, DAB, the
				// unique-document counts, and the final size estimate N.
				HSSFSheet sheetEstimation = wb.createSheet("Estimation");

				HSSFRow rowDA = sheetEstimation.createRow(0);
				HSSFCell cellText = rowDA.createCell(0);
				cellText.setCellValue("DA");
				HSSFCell cellWeightA = rowDA.createCell(1);
				cellWeightA.setCellValue(runnerA.getMeanQueryWeight());
				HSSFCell cellEstimateDA = rowDA.createCell(2);
				cellEstimateDA.setCellValue(da);

				HSSFRow rowDB = sheetEstimation.createRow(1);
				cellText = rowDB.createCell(0);
				cellText.setCellValue("DB");
				HSSFCell cellWeightB = rowDB.createCell(1);
				cellWeightB.setCellValue(runnerB.getMeanQueryWeight());
				HSSFCell cellEstimateDB = rowDB.createCell(2);
				cellEstimateDB.setCellValue(db);

				HSSFRow rowDAB = sheetEstimation.createRow(2);
				cellText = rowDAB.createCell(0);
				cellText.setCellValue("DAB");
				HSSFCell cellWeightAB = rowDAB.createCell(1);
				cellWeightAB.setCellValue(runnertwosets.getMeanQueryWeight());
				HSSFCell cellEstimateDAB = rowDAB.createCell(2);
				cellEstimateDAB.setCellValue(dab);

				HSSFRow rowunique = sheetEstimation.createRow(3);
				cellText = rowunique.createCell(0);
				cellText.setCellValue("Sampled Doc");
				HSSFCell celluniqueDA = rowunique.createCell(1);
				celluniqueDA.setCellValue(dauniqueids.size());
				HSSFCell celluniqueDB = rowunique.createCell(2);
				celluniqueDB.setCellValue(dbuniqueids.size());
				HSSFCell celluniqueDAB = rowunique.createCell(3);
				celluniqueDAB.setCellValue(dabuniqueids.size());
				// Union of all sampled document ids (mutates dabuniqueids,
				// which is not read again afterwards).
				HSSFCell cellallD = rowunique.createCell(4);
				dabuniqueids.addAll(dauniqueids);
				dabuniqueids.addAll(dbuniqueids);
				cellallD.setCellValue(dabuniqueids.size());

				HSSFRow rowEstimate = sheetEstimation.createRow(4);
				cellText = rowEstimate.createCell(0);
				cellText.setCellValue("N");
				HSSFCell cellResult = rowEstimate.createCell(1);
				// Capture-recapture estimate of total size: N = DA / DAB * DB.
				e_sizes[i] = (double) da / dab * db;
				cellResult.setCellValue(e_sizes[i]);

				meanvalue += e_sizes[i];

				// try-with-resources: the original leaked the stream when
				// wb.write threw.
				try (OutputStream out = new FileOutputStream(args[5]
						+ java.io.File.separator + args[6] + "_"
						+ wordsA.size() + "_" + wordsB.size() + "_trail" + i
						+ ".xls")) {
					wb.write(out);
				}
			}
			meanvalue = meanvalue / times;

			// Write the mean estimate and its standard deviation to a
			// summary text file.
			String content = "Mean Estimated Size:" + "\t" + meanvalue + "\n"
					+ "SD:" + "\t" + sd.evaluate(e_sizes);
			try (FileOutputStream fp = new FileOutputStream(args[5]
					+ java.io.File.separator + args[6] + "_" + wordsA.size()
					+ "_" + wordsB.size() + "_result" + ".txt")) {
				fp.write(content.getBytes("UTF-8"));
				fp.flush();
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

}
