package ca.uwindsor.cs.deepweb.estimation.method.or;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;

import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.Hits;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

import ca.uwindsor.cs.deepweb.estimation.FilesContentIndexer;
import ca.uwindsor.cs.deepweb.estimation.method.border.TermCell;
import ca.uwindsor.cs.deepweb.estimation.method.border.TermWeightEstimatorThread;
import ca.uwindsor.cs.deepweb.utility.documentanalysis.TermFrequencyCell;

/**
 * Estimates the size of a document collection with the "overlapping rate" (OR)
 * method: sample random terms from a term/DF dictionary, drop the top ~2% most
 * frequent ones, issue each remaining term as a query against a Lucene index,
 * and derive the collection size from how strongly the result sets overlap.
 */
public class EstimatorRemoveTop2present {

	/** Path to the Lucene index directory. */
	private String index;

	/** Path to the term-weight/DF dictionary file. */
	private String dict;

	/** Collection-size estimate produced by the most recent {@link #run(int)}. */
	private int estimatedsize;

	/** Sum of per-query result-set sizes (with duplicates) from the last run. */
	private long total;

	/** Term/DF list loaded from the dictionary file. */
	private ArrayList<TermCell<Float, Integer>> list;

	private IndexSearcher searcher;

	private QueryParser queryparser;

	/** Union of all document ids matched across the queries of one run. */
	private HashSet<String> unique;

	/** Document ids matched by the current query; cleared before each query. */
	private Set<String> newid;

	/**
	 * @param indexdir         directory of the Lucene index to search
	 * @param termweightdffile dictionary file of terms with their document frequencies
	 */
	public EstimatorRemoveTop2present(String indexdir, String termweightdffile) {
		index = indexdir;
		dict = termweightdffile;
		init();
	}

	/**
	 * Loads the term/DF dictionary and opens the index searcher.
	 * Exits the JVM on an unrecoverable I/O failure.
	 */
	protected void init() {
		TermWeightEstimatorThread e = new TermWeightEstimatorThread();
		try {
			e.load(dict);
			searcher = new IndexSearcher(index);
		} catch (IOException e1) {
			e1.printStackTrace();
			// BUGFIX: exit with a non-zero status on a fatal initialization
			// failure (the original exited with 0, signalling success).
			System.exit(1);
		}
		list = e.getWeightDFList();
		queryparser = new QueryParser(FilesContentIndexer.FIELD_CONTENT,
				new StandardAnalyzer());
		unique = new HashSet<String>();
		newid = new HashSet<String>();
	}

	/**
	 * Runs one estimation round with the given number of sampled query terms.
	 * Updates {@link #getEstimatedSize()} and {@link #getDFsum()}.
	 *
	 * @param queries number of random terms to sample from the dictionary
	 * @throws Exception if searching the index fails
	 */
	public void run(int queries) throws Exception {
		Query query;
		Hits hits;

		total = 0;
		double OR;

		// Empirically tuned exponent of the OR-based size formula.
		float exponentofOR = -1.1f;

		String[] terms = getQueries(queries);
		unique.clear();

		for (int i = 0; i < terms.length; i++) {
			newid.clear();
			// Exact term match; the QueryParser variant was deliberately disabled.
			query = new TermQuery(new Term(FilesContentIndexer.FIELD_CONTENT,
					terms[i].trim()));
			hits = searcher.search(query);

			for (int offset = 0; offset < hits.length(); offset++) {
				newid.add(hits.doc(offset).get(FilesContentIndexer.FIELD_ID));
			}

			total += newid.size();
			unique.addAll(newid);
		}

		// BUGFIX: guard against division by zero (NaN estimate) when no query
		// matched any document.
		if (unique.isEmpty()) {
			estimatedsize = 0;
			return;
		}
		OR = total / (double) unique.size();
		estimatedsize = (int) (unique.size() / (1 - Math.pow(OR, exponentofOR)));
	}

	/**
	 * Samples {@code queries} distinct random terms from the dictionary, sorts
	 * them by document frequency, and discards the top ~2% most frequent ones
	 * (at least one) before returning the remaining terms.
	 *
	 * @param queries requested sample size
	 * @return the sampled terms with the most frequent head removed
	 */
	private String[] getQueries(int queries) {
		Random r = new Random();
		int size = list.size();
		// BUGFIX: the original while-loop never terminated when queries
		// exceeded the number of available terms; cap the sample size.
		int sample = Math.min(queries, size);
		Set<Integer> random = new HashSet<Integer>(sample);
		while (random.size() < sample) {
			random.add(r.nextInt(size));
		}
		ArrayList<TermFrequencyCell> temp = new ArrayList<TermFrequencyCell>(
				sample);
		// Renamed from 'index' to avoid shadowing the field of the same name.
		for (Integer pos : random) {
			temp.add(new TermFrequencyCell(list.get(pos).getTerm(),
					list.get(pos).getDF()));
		}
		Collections.sort(temp);
		// Skip the top ~2% (at least 1) most frequent terms -- the
		// "RemoveTop2present" of the class name.
		int startposition = ((int) (queries * 0.02) == 0) ? 1
				: (int) (queries * 0.02);
		List<TermFrequencyCell> result = temp.subList(startposition,
				temp.size());
		String[] terms = new String[result.size()];
		for (int i = 0; i < result.size(); i++) {
			terms[i] = result.get(i).getTerm();
		}
		return terms;
	}

	/** @return the collection-size estimate from the last run */
	public int getEstimatedSize() {
		return this.estimatedsize;
	}

	/** @return the sum of per-query result-set sizes from the last run */
	public long getDFsum() {
		return this.total;
	}

	/**
	 * Releases the underlying index searcher.
	 *
	 * @throws IOException if closing the searcher fails
	 */
	public void close() throws IOException {
		searcher.close();
	}

	/**
	 * Runs 100 estimation rounds for each configured sample size and prints
	 * mean, standard deviation, and mean DF sum.
	 *
	 * @param args {@code args[0]} = index directory, {@code args[1]} = term/DF file
	 * @throws IOException if closing the searcher fails
	 */
	public static void main(String[] args) throws IOException {
		if (args.length != 2) {
			System.exit(-1);
		}

		// Sample sizes to evaluate; autoboxing replaces the deprecated
		// new Short(...) constructor calls.
		ArrayList<Short> t = new ArrayList<Short>();
		t.add((short) 50);
		t.add((short) 100);
		t.add((short) 200);
		t.add((short) 500);
		t.add((short) 1000);

		System.out.println("OR method: " + args[0]);
		int sizes;
		long dfsums;
		EstimatorRemoveTop2present e = new EstimatorRemoveTop2present(args[0],
				args[1]);
		for (short t_ : t) {
			SummaryStatistics summary = new SummaryStatistics();
			double meanN = 0;

			for (int i = 0; i < 100; i++) {

				try {
					e.run(t_);
				} catch (Exception e1) {
					e1.printStackTrace();
					System.exit(-1);
				}

				sizes = e.getEstimatedSize();
				dfsums = e.getDFsum();

				summary.addValue(sizes);
				meanN = meanN + (double) dfsums / 100;
				// Removed the explicit System.gc() call: forcing collection
				// inside a measurement loop is a documented anti-pattern and
				// has no effect on the computed statistics.
			}
			System.out.println(t_);
			System.out.println(summary.getMean());
			System.out.println(summary.getStandardDeviation());
			System.out.println(meanN);
		}
		e.close();
	}

}
