package ca.uwindsor.cs.deepweb.estimation.method.capturehistory;

import java.io.FileOutputStream;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Random;
import java.util.Set;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.Hits;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;

import ca.uwindsor.cs.deepweb.estimation.DataCollectorWithLimitationTop;
import ca.uwindsor.cs.deepweb.estimation.FilesContentIndexer;
import ca.uwindsor.cs.deepweb.estimation.RandomWordSelector;

/**
 * Capture-history (CH) corpus-size estimator. Issues random single-word
 * queries against a Lucene index, keeps only the top {@code topsize} hits
 * per query, and maintains the running CH regression estimate
 * sum(|new| * |unique|^2) / sum(|duplicates| * |unique|).
 */
public class RandomQueryReturnTop {
	/** Number of random query words to attempt. */
	protected short wordsquantity;
	/** Distinct document ids captured so far across all accounted queries. */
	protected HashSet<String> unique;
	/** Total ids returned (including overlap) across accounted queries. */
	protected int total;
	/** Duplicates contributed by the most recent query. */
	protected int marked;
	protected int corpussize; // unused here; retained for subclasses
	protected int maxstepsize; // unused here; retained for subclasses
	protected Random r; // unused here; retained for subclasses
	/** Per-query trace: one tab-separated line per accounted query. */
	protected StringBuilder result;
	/** Path of the Lucene index to sample. */
	protected String corpusindex;
	/** Path of the dictionary file supplying random query words. */
	protected String dict;
	/** Number of top hits harvested from each query. */
	protected int topsize;

	private BigInteger nominator; // running sum of |new| * |unique|^2
	private BigInteger denominator; // running sum of |dup| * |unique|
	private int chEstimated; // latest CH estimate (0 until denominator > 0)
	private int numberofqueriesaccounted;

	/**
	 * @param corpusindex
	 *            path of the Lucene index to query
	 * @param dict
	 *            path of the word dictionary (UTF-8)
	 * @param wordsquantity
	 *            number of random words to try
	 * @param topsize
	 *            number of top hits collected per query
	 */
	public RandomQueryReturnTop(String corpusindex, String dict,
			short wordsquantity, int topsize) {
		this.corpusindex = corpusindex;
		this.dict = dict;
		this.wordsquantity = wordsquantity;
		this.topsize = topsize;
		result = new StringBuilder();
		unique = new HashSet<String>();
		nominator = BigInteger.ZERO;
		denominator = BigInteger.ZERO;
	}

	/**
	 * Runs the sampling loop: for each random word, searches the index,
	 * harvests the top hits, and updates the CH accumulators.
	 *
	 * @throws Exception
	 *             on index access or query-parse failure
	 */
	public void query() throws Exception {
		IndexSearcher searcher = new IndexSearcher(corpusindex);
		try {
			QueryParser queryparser = new QueryParser(
					FilesContentIndexer.FIELD_CONTENT, new StandardAnalyzer());

			RandomWordSelector selector = new RandomWordSelector(dict, "UTF-8");

			Set<String> newid;
			HashSet<String> duplicated = new HashSet<String>();

			DataCollectorWithLimitationTop datacollector = new DataCollectorWithLimitationTop(
					FilesContentIndexer.FIELD_ID, topsize);
			numberofqueriesaccounted = 0;
			for (int i = 0; i < wordsquantity; i++) {
				String p = selector.getRandomWord();

				Query query = queryparser.parse(p);
				Hits hits = searcher.search(query);
				// Skip rare words: queries with fewer than 20 matches are not
				// accounted, as in the original experiment setup.
				if (hits.length() < 20) {
					continue;
				}
				numberofqueriesaccounted++;
				newid = datacollector.add(hits);
				total += newid.size();

				// How many of this query's ids were already captured?
				duplicated.clear();
				duplicated.addAll(newid);
				duplicated.retainAll(unique);
				marked = duplicated.size();

				unique.addAll(newid);

				// CH regression accumulators; unique.size() is read AFTER
				// merging this query's ids (matches the estimator's formula).
				nominator = nominator.add(BigInteger.valueOf(newid.size())
						.multiply(BigInteger.valueOf(unique.size()).pow(2)));
				denominator = denominator.add(BigInteger.valueOf(marked)
						.multiply(BigInteger.valueOf(unique.size())));
				// BUG FIX: was denominator.intValue() != 0 — intValue()
				// truncates to the low 32 bits, so a non-zero denominator
				// could be misread as zero. signum() tests true zero.
				if (denominator.signum() != 0) {
					chEstimated = nominator.divide(denominator).intValue();
				} else {
					chEstimated = 0;
				}

				result.append(p + "\t" + newid.size() + "\t"
						+ duplicated.size() + "\t" + unique.size() + "\t"
						+ chEstimated + "\n");
			}
		} finally {
			// BUG FIX: close the searcher even when parse/search throws.
			searcher.close();
		}
	}

	/** @return the latest CH estimate (0 if no duplicates seen yet). */
	public int getEstimatedSize() {
		return chEstimated;
	}

	/** @return number of distinct document ids captured. */
	public int getUnique() {
		return unique.size();
	}

	/** @return total ids returned across accounted queries, with overlap. */
	public int getTotal() {
		return total;
	}

	/** @return the per-query trace accumulated by {@link #query()}. */
	public String toString() {
		return result.toString();
	}

	/** @return the CH numerator accumulator. */
	public BigInteger getNominator() {
		return this.nominator;
	}

	/** @return the CH denominator accumulator. */
	public BigInteger getDenominator() {
		return this.denominator;
	}

	/** @return number of queries that returned at least 20 hits. */
	public int getNumberofQueriesIssued() {
		return this.numberofqueriesaccounted;
	}

	/**
	 * Usage: RandomQueryReturnTop &lt;indexDir&gt; &lt;dictFile&gt;
	 * &lt;outPrefix&gt;. Runs 100 trials for each words-quantity setting and
	 * writes one result file per setting.
	 */
	public static void main(String[] args) {
		if (args.length != 3) {
			System.exit(-1);
		}
		ArrayList<Integer> t = new ArrayList<Integer>();
		t.add(10000);
		t.add(25000);
		t.add(50000);
		for (int wordsquantity : t) {
			try {
				System.out.println("CH-Reg: " + args[0]);
				StringBuilder sb = new StringBuilder();
				BigInteger meanDF = BigInteger.ZERO;
				for (int i = 0; i < 100; i++) {
					RandomQueryReturnTop rs = new RandomQueryReturnTop(args[0],
							args[1], (short) wordsquantity, 10);
					rs.query();
					sb.append(rs.getNumberofQueriesIssued() + "\t"
							+ rs.getUnique() + "\t" + rs.getTotal() + "\t"
							+ rs.getNominator() + "\t" + rs.getDenominator()
							+ "\t" + rs.getEstimatedSize() + "\n");
					meanDF = meanDF.add(BigInteger.valueOf(rs.getTotal()));
					if (i % 10 == 0) {
						System.out.println("check point");
					}
				}
				// BUG FIX: close the stream even if write/flush throws.
				FileOutputStream fp = new FileOutputStream(args[2]
						+ wordsquantity);
				try {
					fp.write(sb.toString().getBytes("UTF-8"));
					fp.flush();
				} finally {
					fp.close();
				}
			} catch (Exception e) {
				e.printStackTrace();
			}
		}
	}
}
