package ca.uwindsor.cs.deepweb.utility;

import java.io.IOException;
import java.util.ArrayList;

import org.apache.lucene.document.Document;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.TermFreqVector;

import ca.uwindsor.cs.deepweb.estimation.FilesContentIndexer;

public class IndexSummary {

	/**
	 * Prints one LaTeX-style table row summarizing two Lucene indexes:
	 * document sizes (read from the FIELD_SIZE stored field of the first
	 * index) and distinct-term counts per document (read from the
	 * FIELD_CONTENT term vectors of the second index).
	 *
	 * NOTE(review): relies on {@code StandardDeviation.sdKnuth} setting the
	 * static {@code StandardDeviation.avg} field as a side effect — confirm
	 * that contract holds before reordering the calls.
	 *
	 * @param args args[0] = index directory with FIELD_SIZE per document;
	 *             args[1] = index directory with FIELD_CONTENT term vectors
	 */
	public static void main(String[] args) {
		if (args.length != 2) {
			// Both arguments are index directories; the previous message said
			// "outputDir", which did not match how args[1] is actually used.
			System.out
					.println("java -jar LuceneIndexReader.jar sizeIndexDir contentIndexDir");
			System.exit(1); // non-zero: wrong usage is an error
		}
		try {
			// Was seeded with 100000, which silently produced a wrong minimum
			// whenever every document was larger than that.
			double max = 0, min = Double.MAX_VALUE;
			ArrayList<Double> sizeList;
			IndexReader sizeIndexReader = IndexReader.open(args[0]);
			try {
				int numDocs = sizeIndexReader.numDocs();
				sizeList = new ArrayList<Double>(numDocs);
				for (int i = 0; i < numDocs; i++) {
					Document doc = sizeIndexReader.document(i);
					double size = Double.parseDouble(doc
							.get(FilesContentIndexer.FIELD_SIZE));
					max = Math.max(max, size);
					min = Math.min(min, size);
					sizeList.add(size);
				}
			} finally {
				// Previously leaked if document(i) or parsing threw.
				sizeIndexReader.close();
			}
			double sizeSd = StandardDeviation.sdKnuth(toPrimitive(sizeList));
			double sizeAvg = StandardDeviation.avg; // side effect of sdKnuth

			int numDocs;
			ArrayList<Double> wordList;
			IndexReader indexReader = IndexReader.open(args[1]);
			try {
				numDocs = indexReader.numDocs();
				wordList = new ArrayList<Double>(numDocs);
				for (int i = 0; i < numDocs; i++) {
					TermFreqVector termFreqVector = indexReader.getTermFreqVector(i,
							FilesContentIndexer.FIELD_CONTENT);
					if (termFreqVector == null) {
						// Documents indexed without term vectors are skipped,
						// matching the original best-effort behavior.
						System.out.println("termFreqVector is null.");
						continue;
					}
					wordList.add((double) termFreqVector.getTerms().length);
				}
			} finally {
				indexReader.close();
			}
			double wordsSd = StandardDeviation.sdKnuth(toPrimitive(wordList));
			double wordsAvg = StandardDeviation.avg; // side effect of sdKnuth

			// LaTeX table row: corpus & docs & avg size & sd & range & avg words & sd
			System.out.print("Sogou Web Corpus 2M & " + numDocs + " &");
			System.out.print(" " + (int) sizeAvg + " &");
			System.out.print(" " + (int) sizeSd + " &");
			System.out
					.print(" " + (int) min + " - " + (int) max + " &");
			System.out.print(" " + (int) wordsAvg + " &");
			System.out.print(" " + (int) wordsSd + " \\");
		} catch (CorruptIndexException e) {
			e.printStackTrace();
			System.exit(-1);
		} catch (IOException e) {
			e.printStackTrace();
			System.exit(-1);
		}
		System.out.println("finished");
	}

	/**
	 * Unboxes a list of Double into the double[] form expected by
	 * StandardDeviation.sdKnuth. Extracted from two duplicated loops.
	 */
	private static double[] toPrimitive(ArrayList<Double> values) {
		double[] result = new double[values.size()];
		for (int i = 0; i < result.length; i++) {
			result[i] = values.get(i);
		}
		return result;
	}

}
