package myLucene;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Vector;

import javax.imageio.stream.FileImageInputStream;

import matrics.SparseVector;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.FilterIndexReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultipleTermPositions;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.index.TermFreqVector;
import org.apache.lucene.index.TermPositionVector;
import org.apache.lucene.index.TermPositions;
import org.apache.lucene.index.TermVectorMapper;
import org.apache.lucene.index.TermVectorOffsetInfo;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.regex.SpanRegexQuery;
import org.apache.lucene.search.spans.SpanNearQuery;
import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.search.spans.Spans;
import org.apache.lucene.search.spell.LuceneDictionary;
import org.apache.lucene.store.instantiated.InstantiatedDocument;
import org.apache.lucene.store.instantiated.InstantiatedTermPositionVector;
import org.apache.lucene.util.Version;

import vectorConstruction.IdiomVectorConstruction;

/**
 * Search helpers for building distributional term vectors from a Lucene
 * index: runs idiom (span-near) and single-verb queries, accumulates
 * per-term frequencies and distances around each hit, and serializes the
 * results to disk under {@code Props.writeToPath}.
 */
public class Searcher {

	/**
	 * Executes a span-near ("idiom") query, accumulates term frequencies and
	 * term distances of the context around every matching span, then writes
	 * the TF array and the distance lists to disk.
	 *
	 * @param iReader    open index reader to search
	 * @param sq         the span-near query describing the idiom
	 * @param intToTerms maps term ids (array index) back to term strings
	 * @throws Exception if searching or serialization fails
	 */
	public static void searchIdioms(IndexReader iReader, SpanNearQuery sq,
			String[] intToTerms) throws Exception {
		long searchStart = System.currentTimeMillis();

		System.out.println(sq.toString());
		Spans spans = sq.getSpans(iReader);

		int numberOfInst = 0;
		int[] tf = new int[Props.dictSize];
		LinkedList<Integer>[] dist = Searcher.iniListArray();
		// Integer.MAX_VALUE is a placeholder cap on the number of spans
		// processed; lower it to limit the amount of work.
		while (spans.next() && numberOfInst < Integer.MAX_VALUE) {
			System.out.println("Searcher, HitNumber: "
					+ spans.doc()
					+ " "
					+ Lucene.getDocToString(iReader.document(spans.doc()),
							"content"));

			TermFre.getTF(iReader, spans.doc(), spans.start(), spans.end(), tf,
					dist, intToTerms);
			numberOfInst++;
		}

		// Compute the query's file name once (the previous version recomputed
		// it four times).
		String filename = IdiomVectorConstruction.queryToString(sq.toString(),
				intToTerms);
		System.out.println(">>>>>>>>> number of Spans "
				+ numberOfInst + " " + filename);

		TfIdfOld.writeTf(tf, filename, intToTerms);
		System.out.println("Length tf map :" + tf.length);
		System.out.println("Number of instances " + numberOfInst);
		long searchEnd = System.currentTimeMillis();
		System.out.println("Search End: "
				+ GetTime.getDuration(searchStart, searchEnd));
		TfIdfOld.getTfIdf(tf, iReader, intToTerms);
		SerializeArray.write(tf, Props.writeToPath + "/TF/" + filename
				+ "_tf_" + Props.windowSize + "_.data");
		SerializeLinkedListArray.write(dist, Props.writeToPath + "/Dist/"
				+ filename + "_dist_" + Props.windowSize + "_.data");
	}

	/**
	 * Creates an array of {@code Props.dictSize} empty lists, one distance
	 * list per dictionary term.
	 *
	 * @return a fully initialized array of empty {@code LinkedList<Integer>}s
	 */
	@SuppressWarnings("unchecked")
	public static LinkedList<Integer>[] iniListArray() {
		// Generic array creation is illegal in Java; create the raw array and
		// rely on the suppressed unchecked cast.
		LinkedList<Integer>[] dist = new LinkedList[Props.dictSize];
		for (int x = 0; x < dist.length; x++) {
			dist[x] = new LinkedList<Integer>();
		}
		return dist;
	}

	/**
	 * Searches all occurrences of a single verb (given as its integer term id
	 * in decimal string form), accumulates context term frequencies and
	 * distances around each occurrence, and serializes the results to disk.
	 *
	 * @param verb       the verb's term id as a decimal string
	 * @param iReader    open index reader to search
	 * @param intToTerms maps term ids (array index) back to term strings
	 * @throws Exception if query parsing, searching or serialization fails
	 */
	public static void searchVerbs(String verb, IndexReader iReader,
			String[] intToTerms) throws Exception {
		int verbAsInt = Integer.parseInt(verb);
		long searchStart = System.currentTimeMillis();
		System.out.println("Hier");
		int[] tf = new int[Props.dictSize];
		System.out.println("tf ready");
		LinkedList<Integer>[] dist = Searcher.iniListArray();
		System.out.println("dist ready");

		QueryParser queryParser = new QueryParser(Version.LUCENE_30,
				Props.queryField, new StandardAnalyzer(Version.LUCENE_30));
		Query query2 = queryParser.parse(verb);
		IndexSearcher searcher = new IndexSearcher(iReader);
		// Cap the number of collected hits: at most 50000, otherwise the
		// term's document frequency plus head room (getDF was previously
		// computed twice).
		int df = TermFre.getDF(iReader, verb);
		TopDocs td;
		if (df > 50000) {
			td = searcher.search(query2, 50000);
		} else {
			td = searcher.search(query2, df + 10000);
		}

		ScoreDoc[] hits = td.scoreDocs;
		int numberOfInst = 0;
		System.out.println("!!!!!!!!!!!!!!!!!Searcher number of Hits "
				+ hits.length + " " + verb);
		// Iterate from the last hit to the first.
		for (int x = hits.length; x > 0; x--) {
			System.out.println("Searcher number " + (x - 1) + " "
					+ (hits.length - x));
			ScoreDoc sc = hits[x - 1];
			// Debug output. NOTE(review): assumes every explanation has at
			// least two detail entries — confirm, otherwise this line throws.
			System.out.println(sc.doc + " "
					+ searcher.explain(query2, sc.doc).getDetails()[1]);

			// Term vectors must be stored with positions for this field;
			// otherwise the cast below fails.
			TermFreqVector tfvector = iReader.getTermFreqVector(sc.doc,
					Props.queryField);
			TermPositionVector tpvector = (TermPositionVector) tfvector;
			int termidx = tfvector.indexOf(verb);
			int[] termposx = tpvector.getTermPositions(termidx);
			for (int j = 0; j < termposx.length; j++) {
				System.out.println("Searcher, termpos : " + termposx[j]);
				// Start == end: the "span" is the single verb position.
				TermFre.getTF(iReader, sc.doc, termposx[j], termposx[j], tf,
						dist, intToTerms);
			}

			numberOfInst = numberOfInst + termposx.length;
		}

		System.out.println("Length tf map :" + tf.length);
		System.out.println("Number of instances " + numberOfInst);
		long searchEnd = System.currentTimeMillis();
		System.out.println("Search End: "
				+ GetTime.getDuration(searchStart, searchEnd));

		TfIdfOld.getTfIdf(tf, iReader, intToTerms);
		SerializeArray.write(tf, Props.writeToPath + "/TF/" + verb + "_"
				+ intToTerms[verbAsInt] + "_" + Props.windowSize + "_tf_.data");
		SerializeLinkedListArray.write(dist, Props.writeToPath + "/Dist/"
				+ verb + "_" + intToTerms[verbAsInt] + "_" + Props.windowSize
				+ "_dist_.data");
		// Does NOT close the shared IndexReader, only this searcher.
		searcher.close();
	}

	/**
	 * Reads the term-to-id mapping from the fixed file
	 * {@code /local/joofeit/Terms}; lines that are not exactly
	 * "term id" are skipped.
	 *
	 * @deprecated superseded elsewhere; kept for compatibility
	 * @return map from term string to its integer id
	 * @throws IOException if the terms file cannot be read
	 */
	@Deprecated
	public static HashMap<String, Integer> termsToInt() throws IOException {
		HashMap<String, Integer> terms = new HashMap<String, Integer>();
		long start = System.currentTimeMillis();

		System.out.println("Start write terms to int..." + GetTime.getTime());

		BufferedReader br = new BufferedReader(new FileReader(new File(
				"/local/joofeit/Terms")));
		try {
			String line;
			while ((line = br.readLine()) != null) {
				String[] lineArray = line.split(" ");
				if (lineArray.length == 2) {
					terms.put(lineArray[0], Integer.parseInt(lineArray[1]));
				}
			}
		} finally {
			// The previous version leaked the reader.
			br.close();
		}
		System.out.println("Time needed  terms to int ... "
				+ GetTime.getDuration(start, System.currentTimeMillis()));

		return terms;
	}

	/**
	 * Reads an id-to-term mapping from a file of "id term" lines.
	 *
	 * NOTE(review): the file path is the empty string, so this method always
	 * throws FileNotFoundException as written — the intended path needs to be
	 * filled in (the parameter {@code termsToInt} is also unused).
	 *
	 * @param termsToInt unused (kept for signature compatibility)
	 * @return map from integer id to term string
	 * @throws Exception if the file cannot be read
	 */
	public static HashMap<Integer, String> intToTerms(
			HashMap<String, Integer> termsToInt) throws Exception {
		HashMap<Integer, String> intToTerms = new HashMap<Integer, String>();
		BufferedReader br = new BufferedReader(new FileReader(new File("")));
		try {
			String line;
			while ((line = br.readLine()) != null) {
				// Split once instead of three times per line.
				String[] parts = line.split(" ");
				if (parts.length == 2) {
					intToTerms.put(Integer.parseInt(parts[0]), parts[1]);
				} else {
					System.err
							.println("Searcher; intToTerms : line not well formed >"
									+ line + "<");
				}
			}
		} finally {
			// The previous version leaked the reader.
			br.close();
		}
		return intToTerms;
	}

	/**
	 * Creates the per-verb TF and DIST output directories under
	 * {@code Props.writeToPath + "/IDF/"}. Uses {@code mkdirs} so missing
	 * parents are created too; failures are reported instead of being
	 * silently dropped as before.
	 *
	 * @param verb       the verb's term id
	 * @param intToTerms maps term ids to term strings
	 */
	public static void makeDir(Integer verb, String[] intToTerms) {
		File tfDir = new File(Props.writeToPath + "/IDF/" + intToTerms[verb]
				+ "/TF_" + intToTerms[verb]);
		File distDir = new File(Props.writeToPath + "/IDF/" + intToTerms[verb]
				+ "/DIST_" + intToTerms[verb]);
		if (!tfDir.mkdirs() && !tfDir.isDirectory()) {
			System.err.println("Searcher.makeDir: could not create " + tfDir);
		}
		if (!distDir.mkdirs() && !distDir.isDirectory()) {
			System.err.println("Searcher.makeDir: could not create " + distDir);
		}
	}

	/**
	 * Appends each word's intermediate term frequency to its per-word TF file
	 * under {@code /local/joofeit/IDF/<verb>/TF_<verb>/}.
	 *
	 * Bug fix: counts are now written with {@code writeInt} (4 bytes) so that
	 * {@link #getTFMap(Integer, String[])}, which reads these files with
	 * {@code readInt}, decodes them correctly; the old {@code write(int)}
	 * emitted only the low byte. Also removes a second, never-used output
	 * stream on the same file and the post-loop double close that threw a
	 * NullPointerException for an empty map.
	 *
	 * @param verb       the verb's term id
	 * @param map        word id to intermediate frequency
	 * @param intToTerms maps term ids to term strings
	 * @throws IOException if a TF file cannot be written
	 */
	public static void writeToFileInterTF(Integer verb,
			HashMap<Integer, Integer> map, String[] intToTerms)
			throws IOException {
		long start = System.currentTimeMillis();

		for (Integer word : map.keySet()) {
			String path = "/local/joofeit/IDF/" + intToTerms[verb] + "/TF_"
					+ intToTerms[verb] + "/" + word + "_tf" + "_"
					+ intToTerms[verb];
			DataOutputStream dataOut = new DataOutputStream(
					new BufferedOutputStream(new FileOutputStream(new File(
							path), true)));
			try {
				dataOut.writeInt(map.get(word).intValue());
			} finally {
				dataOut.close();
			}
		}
		System.out.println("writeToFileInterTF : "
				+ GetTime.getDuration(start, System.currentTimeMillis()));
	}

	/**
	 * Appends each word's distance values to its per-word DIST file under
	 * {@code /local/joofeit/IDF/<verb>/DIST_<verb>/}.
	 *
	 * NOTE(review): distances are written with {@code write(int)}, i.e. one
	 * byte each — values outside 0..255 are truncated. The reader of these
	 * files is not visible here; confirm the expected format before changing
	 * this to {@code writeInt}. Cleanup only: the redundant second output
	 * stream and the post-loop double close (NPE on empty map) were removed.
	 *
	 * @param verb       the verb's term id
	 * @param map        word id to list of distances
	 * @param intToTerms maps term ids to term strings
	 * @throws IOException if a DIST file cannot be written
	 */
	public static void writeToFileInterDist(Integer verb,
			HashMap<Integer, LinkedList<Integer>> map, String[] intToTerms)
			throws NumberFormatException, IOException {
		long start = System.currentTimeMillis();
		for (Integer word : map.keySet()) {
			String path = "/local/joofeit/IDF/" + intToTerms[verb] + "/DIST_"
					+ intToTerms[verb] + "/" + word + "_" + Props.windowSize
					+ "_dist" + "_" + intToTerms[verb];
			DataOutputStream dataOut = new DataOutputStream(
					new BufferedOutputStream(new FileOutputStream(new File(
							path), true)));
			try {
				for (Integer pos : map.get(word)) {
					dataOut.write(pos.intValue());
				}
			} finally {
				dataOut.close();
			}
		}
		System.out.println("writeToFileInterDist : "
				+ GetTime.getDuration(start, System.currentTimeMillis()));
	}

	/**
	 * Sums the per-word TF values from every file in
	 * {@code /local/joofeit/IDF/<verb>/TF_<verb>/}. The word id is the
	 * numeric prefix of each file name (before the first '_').
	 *
	 * Previously every IOException — including the EOF that terminates each
	 * file — was caught, stack-traced and swallowed; now only the expected
	 * {@link EOFException} ends the read loop and real I/O errors propagate.
	 *
	 * @param verb       the verb's term id
	 * @param intToTerms maps term ids to term strings
	 * @return map from word id to summed term frequency
	 * @throws IOException if the directory is missing or a file cannot be read
	 */
	public static HashMap<Integer, Integer> getTFMap(Integer verb,
			String[] intToTerms) throws IOException {
		HashMap<Integer, Integer> map = new HashMap<Integer, Integer>();
		File dir = new File("/local/joofeit/IDF/" + intToTerms[verb] + "/TF_"
				+ intToTerms[verb]);
		String[] files = dir.list();
		if (files == null) {
			// list() returns null when the path is not a readable directory.
			throw new FileNotFoundException("Not a readable directory: " + dir);
		}
		for (String f : files) {
			int counter = 0;
			DataInputStream in = new DataInputStream(new BufferedInputStream(
					new FileInputStream(new File(dir.getAbsoluteFile() + "/"
							+ f))));
			try {
				while (true) {
					try {
						counter = counter + in.readInt();
					} catch (EOFException eof) {
						break; // normal end of file
					}
				}
			} finally {
				// Closing the outermost stream closes the wrapped ones too.
				in.close();
			}
			map.put(Integer.parseInt(f.split("_")[0]), counter);
		}
		return map;
	}
}