package util.ir;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Hits;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.SimpleFSDirectory;

import util.io.FileInput;

public class SearchLanguageModel {

	/** Default on-disk index directory (not used by the methods below). */
	static final File INDEX_DIR = new File("index");
	private static boolean deleting = false; // true during deletion pass (currently unused)
	private static IndexReader reader; // existing index (currently unused)
	private static IndexWriter writer; // writer for the index being built
	String index_path = "/home/sergio/index/LM_trigrams"; // default trigram index path

	/** Stored field names of a full trigram document, in TSV column order. */
	private static String fields_trigrams[] = { "a", "b", "c", "f_t", "f_seed",
			"f_12", "f_23", "f_13" };

	/** Stored field names of a "simple" trigram document, in TSV column order. */
	private static String fields_trigrams_simple[] = { "trigram", "f_t",
			"f_seed", "f_12", "f_23", "f_13" };

	/** Stored field names of a bigram document, in TSV column order. */
	private static String fields_bigrams[] = { "a", "b", "f_b", "f_seed",
			"f_1", "f_2" };
	private static String fields_bigrams_simple[] = { "a", "b", "f_b",
			"f_seed", "f_1", "f_2" };

	/** Path of the index created by the index* methods (set by the caller). */
	static String index = "";

	/**
	 * Adds one stored, non-analyzed (verbatim) field to {@code doc}.
	 * All language-model fields are stored this way so exact-term queries work.
	 */
	private static void addStoredField(Document doc, String name, String value) {
		doc.add(new Field(name, value, Field.Store.YES,
				Field.Index.NOT_ANALYZED));
	}

	/**
	 * Builds a fresh index (overwriting any existing one at {@link #index})
	 * from a tab-separated bigram file.
	 *
	 * Expected columns per line: a, b, f_b, f_seed, f_1, f_2.
	 *
	 * @param path TSV file with one bigram record per line
	 */
	private static void indexbigrams(String path) throws CorruptIndexException,
			LockObtainFailedException, IOException {

		// create=true: any existing index at 'index' is replaced.
		writer = new IndexWriter(index, new StandardAnalyzer(), true);

		// NOTE(review): FileInput is a project class whose API is not visible
		// here; if it wraps a stream it should be closed after the loop — TODO confirm.
		FileInput in = new FileInput(path);
		String line = in.readString();

		while (line != null) {

			Document doc = new Document();

			String fields[] = line.split("\t");
			addStoredField(doc, "a", fields[0]);
			addStoredField(doc, "b", fields[1]);
			// store frequencies
			addStoredField(doc, "f_b", fields[2]);
			addStoredField(doc, "f_seed", fields[3]);
			addStoredField(doc, "f_1", fields[4]);
			addStoredField(doc, "f_2", fields[5]);

			System.out.println("indexing.. " + line);
			writer.addDocument(doc); // add docs unconditionally

			line = in.readString();

		}
		System.out.println("Optimizing...");
		writer.optimize();
		writer.close();
		System.out.println("done!");
	}

	/**
	 * Builds a fresh index (overwriting any existing one at {@link #index})
	 * from a tab-separated trigram file.
	 *
	 * Expected columns per line: a, b, c, f_t, f_seed, f_12, f_23, f_13.
	 *
	 * @param path TSV file with one trigram record per line
	 */
	private static void indextrigrams(String path)
			throws CorruptIndexException, LockObtainFailedException,
			IOException {

		// create=true: any existing index at 'index' is replaced.
		writer = new IndexWriter(index, new StandardAnalyzer(), true);

		// NOTE(review): see indexbigrams — FileInput is likely left unclosed; TODO confirm.
		FileInput in = new FileInput(path);
		String line = in.readString();

		while (line != null) {

			Document doc = new Document();

			String fields[] = line.split("\t");
			addStoredField(doc, "a", fields[0]);
			addStoredField(doc, "b", fields[1]);
			addStoredField(doc, "c", fields[2]);
			// store frequencies
			addStoredField(doc, "f_t", fields[3]);
			addStoredField(doc, "f_seed", fields[4]);
			addStoredField(doc, "f_12", fields[5]);
			addStoredField(doc, "f_23", fields[6]);
			addStoredField(doc, "f_13", fields[7]);

			System.out.println("indexing.. " + line);
			writer.addDocument(doc); // add docs unconditionally

			line = in.readString();

		}
		System.out.println("Optimizing...");
		writer.optimize();
		writer.close();
		System.out.println("done!");
	}

	/**
	 * Builds a QueryParser OR-query string matching any seed in one field,
	 * e.g. {@code createOrQuery("b", {"x","y"})} returns {@code "(b:x b:y)"}
	 * (terms separated by spaces are OR-ed under the default operator).
	 *
	 * @param fields the field name each seed term is matched against
	 * @param seeds  terms to OR together; must be non-empty for a useful query
	 * @return the parenthesized query string
	 */
	public static String createOrQuery(String fields, String seeds[]) {

		StringBuilder special_seed = new StringBuilder();

		for (int i = 0; i < seeds.length; i++) {
			special_seed.append(fields).append(":").append(seeds[i]).append(" ");
		}

		return "(" + special_seed.toString().trim() + ")";
	}

	/**
	 * Searches a trigram index for documents whose listed fields match any of
	 * the terms in {@code t}, querying each field in turn.
	 *
	 * For hits of the second field's query the stored "b" and "c" values are
	 * swapped in the result row, so the matched term always appears in the
	 * same result column.
	 *
	 * Precondition: {@code fields.length >= 2} (both queries[0] and queries[1]
	 * are executed).
	 *
	 * @param index  path to the trigram index directory
	 * @param t      seed terms to OR together per field
	 * @param fields field names to query (e.g. {"b", "c"})
	 * @return rows of stored values in {@link #fields_trigrams} order
	 */
	public static HashSet<String[]> trigramSearch(String index, String t[],
			String fields[]) throws CorruptIndexException, IOException,
			ParseException {

		IndexSearcher searcher = new IndexSearcher(index);

		Analyzer analyzer = new StandardAnalyzer();
		QueryParser queryParser = new QueryParser("<default field>", analyzer);
		// <default field> is the field that QueryParser will search if you
		// don't prefix it with a field.

		String queries[] = new String[fields.length];

		for (int i = 0; i < queries.length; i++) {
			// BUG FIX: was createOrQuery(fields[0], t), which built every
			// query against the first field; queries[1] below clearly expects
			// a query over fields[1].
			queries[i] = createOrQuery(fields[i], t);
		}

		// Matching: first field's query, rows copied as stored.
		Hits hits = searcher.search(queryParser.parse(queries[0]));
		HashSet<String[]> response = new HashSet<String[]>();

		for (int i = 0; i < hits.length(); i++) {

			Document doc = hits.doc(i);

			String r[] = new String[fields_trigrams.length];
			for (int j = 0; j < fields_trigrams.length; j++) {
				r[j] = doc.get(fields_trigrams[j]);
			}

			response.add(r);

		}

		// Second field's query: swap answers here, c for b and b for c.
		hits = searcher.search(queryParser.parse(queries[1]));

		for (int i = 0; i < hits.length(); i++) {

			Document doc = hits.doc(i);

			String r[] = new String[fields_trigrams.length];
			String b_temporal = "";

			for (int j = 0; j < fields_trigrams.length; j++) {

				if (fields_trigrams[j].equals("b")) {
					// remember stored "b" and place stored "c" in its column
					b_temporal = doc.get(fields_trigrams[j]);
					r[j] = doc.get("c");
				} else if (fields_trigrams[j].equals("c")) {
					// "b" appears after the "b" column, so b_temporal is set
					r[j] = b_temporal;
				} else {
					r[j] = doc.get(fields_trigrams[j]);
				}

			}

			response.add(r);

		}
		System.out.println(response.size());
		searcher.close();
		return response;

	}

	/**
	 * Searches a "simple" trigram index held as a single analyzed "trigram"
	 * field. The seed terms are joined with tabs; WhitespaceAnalyzer splits on
	 * whitespace (tabs included), so the terms are OR-ed under the parser's
	 * default operator.
	 *
	 * @param index path to the index directory (loaded fully into RAM)
	 * @param t     seed terms to match against the "trigram" field
	 * @return rows of stored values in {@link #fields_trigrams_simple} order
	 */
	public static HashSet<String[]> trigramSearchSimple(String index,
			String t[]) throws CorruptIndexException, IOException,
			ParseException {

		// Load the whole index into memory for faster repeated searching.
		RAMDirectory dir = new RAMDirectory(index);
		IndexSearcher searcher = new IndexSearcher(dir);

		Analyzer analyzer = new WhitespaceAnalyzer();
		QueryParser queryParser = new QueryParser("trigram", analyzer);
		// "trigram" is the field QueryParser searches when a term carries no
		// explicit field prefix.

		StringBuilder sb = new StringBuilder();
		for (int i = 0; i < t.length; i++) {
			sb.append(t[i]).append("\t");
		}
		String query = sb.toString().trim();

		// Matching
		Hits hits = searcher.search(queryParser.parse(query));
		HashSet<String[]> response = new HashSet<String[]>();

		for (int i = 0; i < hits.length(); i++) {

			Document doc = hits.doc(i);

			String r[] = new String[fields_trigrams_simple.length];
			for (int j = 0; j < fields_trigrams_simple.length; j++) {
				r[j] = doc.get(fields_trigrams_simple[j]);
			}

			response.add(r);

		}

		System.out.println(response.size());
		searcher.close();
		return response;

	}

	/**
	 * Runs a conjunction-of-disjunctions query: each tag must match at least
	 * one of the given fields (MUST over SHOULD sub-queries), then prints the
	 * number of matching documents.
	 *
	 * @param tags       terms that must all be present (AND semantics)
	 * @param field      fields any one of which may hold each tag (OR semantics)
	 * @param index      path to the index directory
	 * @param set        unused here; kept for interface compatibility — TODO confirm intent
	 * @param target_pos unused here; kept for interface compatibility — TODO confirm intent
	 */
	public static void standardSearch(ArrayList<String> tags, String field[],
			String index, HashSet<String[]> set, int target_pos)
			throws ParseException, CorruptIndexException, IOException {

		IndexSearcher searcher = new IndexSearcher(index);
		BooleanQuery booleanQuery = new BooleanQuery();

		for (int i = 0; i < tags.size(); i++) {

			// One SHOULD-subquery per tag: the tag may appear in any field…
			BooleanQuery subquery = new BooleanQuery();
			for (int j = 0; j < field.length; j++) {
				String tag = tags.get(i);
				Query query1 = new TermQuery(new Term(field[j], tag));
				subquery.add(query1, BooleanClause.Occur.SHOULD);
			}

			// …but every tag's subquery MUST match (AND across tags).
			booleanQuery.add(subquery, BooleanClause.Occur.MUST);
		}
		// Use BooleanClause.Occur.MUST instead of BooleanClause.Occur.SHOULD
		// for AND queries
		Hits hits = searcher.search(booleanQuery);

		System.out.println("> "); // start with '> '

		// Iterate the hits (loads each stored document); the assembled row was
		// only ever printed by a commented-out debug line, so nothing is
		// collected here.
		for (int i = 0; i < hits.length(); i++) {

			Document doc = hits.doc(i);

			String r = "";
			for (int j = 0; j < field.length; j++) {
				r = r + "\t" + doc.get(field[j]);
			}
			r = r + "\t" + doc.get("f");
			// System.out.println(r.trim());

		}

		System.out.println("\n" + hits.length() + " matching documents");

		searcher.close();

	}

	/**
	 * Demo entry point: runs a simple trigram search against a hard-coded
	 * index path with hard-coded seed terms.
	 *
	 * 1 index/query 2 index path 3 query
	 */
	static public void main(String[] args) throws CorruptIndexException,
			LockObtainFailedException, IOException, ParseException {

		String path = "/home/sergio/delicious_index/trigrams_index_filter2";

		String fields[] = { "b", "c" };

		String t[] = { "a", "bb", "youtube", "videos", "games", "social",
				"media", "songs" };
		trigramSearchSimple(path, t);

	}

}
