package myLucene;

import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Vector;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;

import matrics.SparseVector;

/**
 * 
 * @author joo
 * 
 */
public class TfIdfOld {
	/**
	 * Converts raw term frequencies into a tf-idf weighted sparse vector.
	 *
	 * <p>For every term id with {@code tf > 0} the weight is
	 * {@code tf * log10(maxDoc / docFreq)}, where the document frequency is
	 * looked up in {@code Props.queryField} using the integer term id
	 * rendered as the term text.
	 *
	 * @deprecated kept for reference only
	 * @param tf raw term frequencies, indexed by integer term id
	 * @param iReader index reader supplying {@code maxDoc} / {@code docFreq}
	 * @param intToTerms id-to-term mapping (unused; retained for signature
	 *            compatibility with existing callers)
	 * @return tf-idf weights as a sparse vector of dimension
	 *         {@code Props.dictSize}
	 * @throws IOException if the index cannot be read
	 */
	@Deprecated
	public static SparseVector getTfIdf(int[] tf, IndexReader iReader,
			String[] intToTerms) throws IOException {
		long timeS = System.currentTimeMillis();
		System.out.println("Start TfIdf calculation ... " + GetTime.getTime());

		SparseVector sp = new SparseVector(Props.dictSize);
		// maxDoc is loop-invariant; hoist it out of the loop.
		double maxDoc = iReader.maxDoc();

		for (int term = 0; term < tf.length; term++) {
			if (tf[term] > 0) {
				int docFreq = iReader.docFreq(new Term(Props.queryField,
						String.valueOf(term)));
				// Guard against division by zero: a term id absent from the
				// index would otherwise store an infinite idf in the vector.
				if (docFreq == 0) {
					continue;
				}
				double idf = Math.log10(maxDoc / (double) docFreq);
				sp.put(term, idf * tf[term]);
			}
		}
		System.out.println("TfIdf calculation done, time needed: "
				+ GetTime.getDuration(timeS, System.currentTimeMillis()));
		return sp;
	}

	/**
	 * Writes the raw term-frequency vector as plain text, one
	 * {@code "<tf> <term> <id>"} triple per line.
	 *
	 * <p>Output goes to the hard-coded location
	 * {@code /local/joofeit/idiomTF/<filename>_<Props.windowSize>_test};
	 * TODO(review): this path should come from {@link Props} to make the
	 * class portable.
	 *
	 * @param tf raw term frequencies, indexed by integer term id
	 * @param filename base name used to build the output path
	 * @param intToTerms id-to-term mapping, written next to each frequency
	 * @throws Exception if the output file cannot be written
	 */
	public static void writeTf(int[] tf, String filename, String[] intToTerms)
			throws Exception {
		long start = System.currentTimeMillis();
		System.out.println("Start writeTfIdf..." + GetTime.getTime());

		System.out.println(TfIdfOld.class.getName() + "PATH " + filename
				+ "_tfidf");
		File outFile = new File("/local/joofeit/idiomTF/" + filename + "_"
				+ Props.windowSize + "_test");
		// try-with-resources guarantees the writer is flushed and closed even
		// if a write fails mid-loop; the original leaked the handle on error.
		try (BufferedWriter dataOutTest = new BufferedWriter(new FileWriter(
				outFile))) {
			for (int counter = 0; counter < tf.length; counter++) {
				dataOutTest.write(tf[counter] + " " + intToTerms[counter] + " "
						+ counter + "\n");
			}
		}
		System.out.println("Writing tfidf to memory done, time needed: "
				+ GetTime.getDuration(start, System.currentTimeMillis()));
	}

	/**
	 * Loads the integer-id to term mapping from the hard-coded file
	 * {@code /local/joofeit/termsToIntPunt}; each line must be exactly
	 * {@code "<id> <term>"}.
	 *
	 * @return array of size {@code Props.dictSize} mapping term id to term
	 *         string (ids not present in the file stay {@code null})
	 * @throws Exception if a line does not contain exactly two
	 *             space-separated tokens, or the file cannot be read
	 */
	public static String[] getIntToTerms() throws Exception {
		System.out.println("Start IntToTerms: " + GetTime.getTime());
		long start = System.currentTimeMillis();
		String[] termsToInt = new String[Props.dictSize];
		// try-with-resources: the original leaked the reader when a malformed
		// line threw the Exception below before reaching close().
		try (BufferedReader br = new BufferedReader(new FileReader(new File(
				"/local/joofeit/termsToIntPunt")))) {
			String line;
			while ((line = br.readLine()) != null) {
				// Split once per line instead of three times.
				String[] parts = line.split(" ");
				if (parts.length != 2) {
					throw new Exception(TfIdfOld.class.getName()
							+ " "
							+ Thread.currentThread().getStackTrace()[1]
									.getMethodName());
				}
				termsToInt[Integer.parseInt(parts[0])] = parts[1];
			}
		}
		System.out.println(TfIdfOld.class.getName()
				+ " Time need for termsToInt "
				+ GetTime.getDuration(start, System.currentTimeMillis()));
		return termsToInt;
	}
}