package yagoMarried;
import java.io.File;
import java.io.PrintStream;
import java.io.Reader;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.StringReader;
import java.util.Collections;
import java.util.Set;
import java.util.HashSet;
import java.util.Map;
import java.util.HashMap;
import java.util.List;
import java.util.ArrayList;

import org.apache.lucene.util.Version;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.MMapDirectory;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.index.TermFreqVector;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.QueryTermVector;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.ngram.NGramTokenizer;
import org.apache.lucene.analysis.TokenStream;

//Added by Lei Yao
import java.io.FileWriter;
import java.io.BufferedWriter;
//////////////////////

public class IndexTACReader {
	private static final int NGRAM = 2;

	
	private IndexReader reader = null;
	private IndexSearcher searcher = null;

	private Analyzer analyzer = null;
	private String[] keyArray = null;
	
	public Map<String, Integer> name2id = null;
	private String indexLoc;

	private class NGramAnalyzer extends Analyzer {
		private int ngram = 0;
		
		public NGramAnalyzer(int ngram) {
			this.ngram = ngram;
		}
		
		public TokenStream tokenStream(String fieldName, Reader reader) {
			return new NGramTokenizer(reader, 2, ngram);
		}
	}

	public IndexTACReader() {
		analyzer = new NGramAnalyzer(NGRAM);
	}
	
	public IndexTACReader(String indexLoc) {
		this();
		this.indexLoc = indexLoc;
		loadIndex(indexLoc);
	}
	
	public String queryWikiContent(String wikititle){
		return this.queryEntityJSONIndex(wikititle);
	}

	/**
	 * This function gives an example about how to load the alias2entity alias.
	 * @param idxLoc
	 */
	private void loadIndex(String idxLoc) {
		Directory dir = null;
		
		try {
			dir = new RAMDirectory(new MMapDirectory(new File(idxLoc)));
			if (!IndexReader.indexExists(dir))
				return;
			
			reader = IndexReader.open(dir);

			//The keyArray is a mapping between the lucene internal id to the string in field "docID"
			//The mapping is stored here to improve the search performance.
			keyArray = FieldCache.DEFAULT.getStrings(reader, "docID");
		
			searcher = new IndexSearcher(reader);
			System.out.println("Load index finished ");
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
	
	
	public String queryEntityJSONIndex(String entName) {
		String ret = null;
		
		try {
			//convert the id->string map to string->id map.
			if (name2id == null) {
				name2id = new HashMap<String, Integer>();
				for (int i = 0; i < keyArray.length; i++)
					name2id.put(keyArray[i], i);
			}
			
			if (!name2id.containsKey(entName))
				return null;
				
			int docId = name2id.get(entName);
			Document doc = reader.document(docId);
			Fieldable field = doc.getFieldable("content");
			ret = field.stringValue();
		} catch (Exception e) {
			e.printStackTrace();
		}
		
		return ret;
	}
	public static String convert(String s){
		if ( s.indexOf("\\u")==-1){
			return s;
		}
		
		StringBuffer ns = new StringBuffer();
		int i1=s.indexOf("\\u");
		int i2 = 0;
		while( i1!=-1 ){
			//System.out.println(ns.toString());
			ns.append(s.substring(i2, i1));
			String sb = s.substring(i1+2,i1+6);
			int intValue = Integer.parseInt(sb, 16);
			ns.append((char)intValue);
			i2 = i1+6;
			if (i2>=s.length())
				break;
			i1 = s.indexOf("\\u",i2);
			
		}
		//return null;
		return ns.toString();
	}
	
	public static void main(String[] args) {
		try {
			IndexTACReader r = new IndexTACReader("/largedata1/cmput696/students/lyao1/tacindex");
			String Query = args[0];
			Query = convert(Query);
			PrintStream out = new PrintStream(System.out, true, "UTF-8");
		    out.println("Query:"+Query);
			String content=r.queryWikiContent(Query);
			//String content=r.queryEntityJSONIndex("Mike_Quigley_(footballer)");
		//##TRY DAVID BECKHAM
			out.println(content);
			

		} catch (Exception e) {
			e.printStackTrace();
		}
	}
}

