package util.localLaunchers;

import java.io.IOException;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.LinkedList;

import org.apache.pig.data.BagFactory;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.DataType;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.apache.pig.impl.logicalLayer.schema.Schema;

import udf.matching.GenerateQueryTagsGeneral;
import udf.string.CleanTag;
import util.io.FileInput;

public class GenerateBookmarks {

	/**
	 * Tag cleaner / stop-word checker used throughout. Kept {@code public}
	 * because that is the original interface; external code may read it.
	 */
	public CleanTag cleaner = null;

	/** Vocabulary terms loaded by {@link #init(String)} (first tab-separated field of each line). */
	private HashSet<String> voc = new HashSet<String>();

	/** Largest number of extra tokens appended when probing n-grams (grams span up to 6 tokens). */
	private static final int MAX_EXTRA_TOKENS = 5;

	/**
	 * Builds a bookmark-tag generator.
	 *
	 * @param stop_words path of the stop-word list consumed by {@link CleanTag}
	 * @param vocabulary path of the vocabulary file (tab-separated, term in column 0)
	 */
	public GenerateBookmarks(String stop_words, String vocabulary) {
		try {
			init(vocabulary);
		} catch (IOException e) {
			// Best-effort (original behavior): log and continue with an empty vocabulary.
			e.printStackTrace();
		}
		cleaner = new CleanTag(stop_words, false);
	}

	/**
	 * Loads the vocabulary file into {@link #voc}. One entry per line; only the
	 * first tab-separated field is kept (trimmed).
	 *
	 * @param vocabulary path of the vocabulary file
	 * @throws IOException if the file cannot be read
	 */
	public void init(String vocabulary) throws IOException {
		FileInput in = new FileInput(vocabulary);
		for (String line = in.readString(); line != null; line = in.readString()) {
			voc.add(line.split("\t")[0].trim());
		}
	}

	/**
	 * Extracts candidate tags from the query, the title and the description and
	 * accumulates their frequencies into {@code list}.
	 *
	 * NOTE(review): {@code threshold1} is accepted for interface compatibility
	 * but is unused here — thresholding happens in {@link #exec}.
	 *
	 * @param query       user query (punctuation already stripped by the caller)
	 * @param r           related text used as an extra containment check
	 * @param title       result title
	 * @param description result snippet/description
	 * @param threshold1  unused
	 * @param list        output accumulator: candidate tag -&gt; frequency
	 */
	public void getTags(String query, String r, String title,
			String description, int threshold1, Hashtable<String, Integer> list) {
		updateHash(query, r, list, splitText(query));
		updateHash(query, r, list, splitText(title));
		updateHash(query, r, list, splitText(description));
	}

	/** Dumps every (tag, frequency) pair of {@code suggestions} to stdout, tab-separated. */
	public void printHash(Hashtable<String, Integer> suggestions) {
		for (String key : suggestions.keySet()) {
			System.out.println(key + "\t" + suggestions.get(key));
		}
	}

	/**
	 * Greedy n-gram matcher: for every token position, tries the longest gram
	 * first (up to {@link #MAX_EXTRA_TOKENS} following tokens) and counts the
	 * first gram that survives stop-word cleaning and is either in the
	 * vocabulary or literally contained (with spaces) in {@code query} or
	 * {@code r}.
	 *
	 * @param query       original query, used for containment checks
	 * @param r           related text, used for containment checks
	 * @param suggestions accumulator: gram -&gt; frequency
	 * @param tokens      normalized tokens produced by {@link #splitText(String)}
	 */
	private void updateHash(String query, String r, Hashtable<String, Integer> suggestions,
			LinkedList<String> tokens) {
		for (int i = 0; i < tokens.size(); i++) {
			for (int extra = MAX_EXTRA_TOKENS; extra >= 0; extra--) {
				if (i + extra >= tokens.size()) {
					continue; // not enough tokens left for a gram of this length
				}
				// Join tokens[i .. i+extra] with underscores.
				StringBuilder sb = new StringBuilder(tokens.get(i));
				for (int j = i + 1; j <= i + extra; j++) {
					sb.append('_').append(tokens.get(j));
				}
				String gram = cleanStopWords(sb.toString());

				boolean accepted = !gram.equals("")
						&& !cleaner.isStopWord(gram)
						&& (voc.contains(gram)
								|| query.contains(gram.replace("_", " "))
								|| r.contains(gram.replace("_", " ")));
				if (accepted) {
					Integer freq = suggestions.containsKey(gram) ? suggestions.get(gram) + 1 : 1;
					suggestions.put(gram, freq);
					break; // longest match wins; stop probing shorter grams at this position
				}
			}
		}
	}

	/**
	 * Removes stop-words from an underscore-joined gram.
	 *
	 * Rules (unchanged from the original): if every token is a stop-word the
	 * empty string is returned; multi-token grams have trailing stop-word
	 * tokens stripped; single-token grams starting with {@code "a_"} or
	 * {@code "to_"} have that prefix removed (legacy rule).
	 *
	 * @param a underscore-joined gram
	 * @return the cleaned gram, possibly empty
	 */
	public String cleanStopWords(String a) {
		String[] parts = a.split("_");

		// If every token is a stop-word, the whole gram is noise.
		boolean allStop = true;
		for (String part : parts) {
			if (!cleaner.isStopWord(part)) {
				allStop = false;
				break;
			}
		}
		if (allStop) {
			return "";
		}

		if (parts.length > 1) {
			// Strip trailing stop-words; terminates because at least one token
			// is not a stop-word (allStop == false above).
			while (cleaner.isStopWord(parts[parts.length - 1])) {
				a = remove_last(a);
				parts = a.split("_");
			}
			return a;
		}

		// Single token: drop legacy "a_" / "to_" prefixes if present.
		if (a.startsWith("a_")) {
			return a.replaceFirst("a_", "");
		}
		if (a.startsWith("to_")) {
			return a.replaceFirst("to_", "");
		}
		return a;
	}

	/**
	 * Drops the last underscore-separated token of {@code a}.
	 *
	 * @param a underscore-joined gram
	 * @return everything before the last {@code '_'}, or {@code a} unchanged
	 *         when there is no separator (guards the original
	 *         StringIndexOutOfBoundsException on "_"-free input)
	 */
	public String remove_last(String a) {
		int index = a.lastIndexOf("_");
		if (index < 0) {
			return a; // no separator: nothing to remove
		}
		return a.substring(0, index);
	}

	/**
	 * Lower-cases, whitespace-normalizes and tokenizes {@code text}, keeping
	 * only tokens that survive {@link CleanTag#normalizeTag} and are neither
	 * trash nor empty.
	 *
	 * @param text free text; {@code null} yields an empty list
	 * @return normalized tokens in order of appearance
	 */
	public LinkedList<String> splitText(String text) {
		LinkedList<String> tokens = new LinkedList<String>();
		if (text == null) {
			return tokens;
		}
		for (String raw : text.replaceAll("\\s+", " ").toLowerCase().split(" ")) {
			String word = CleanTag.normalizeTag(raw);
			if (!cleaner.isTagTrashignoreStopWords(word) && !word.trim().equals("")) {
				tokens.add(word.trim());
			}
		}
		return tokens;
	}

	/**
	 * Strips punctuation from the inputs, extracts candidate tags and prints,
	 * to stdout, every tag whose frequency exceeds {@code threshold} — one line
	 * per tag: a synthetic bookmark id, a date stamp, the URL, the tag and its
	 * count.
	 *
	 * @param query     user query
	 * @param r         related text used for containment checks
	 * @param url       result URL (echoed in the output)
	 * @param title     result title
	 * @param snippet   result snippet
	 * @param threshold minimum frequency (exclusive) for a tag to be printed
	 * @param list      in/out frequency accumulator
	 */
	public void exec(String query, String r, String url, String title, String snippet,
			int threshold, Hashtable<String, Integer> list) {
		query = query.replaceAll("[.,;\"':_|-]", " ");
		title = title.replaceAll("[.,;:'\"_|-]", " ");
		snippet = snippet.replaceAll("[.',\";:_|-]", " ");

		getTags(query, r, title, snippet, threshold, list);

		for (String key : list.keySet()) {
			if (list.get(key) > threshold) {
				// Synthetic bookmark record; the id and the date stamp are fixed legacy markers.
				System.out.println("bs4234d234234\t2009-30-14\t" + url + "\t" + key + "\t" + list.get(key));
			}
		}
	}

	/**
	 * Reads a tab-separated results file (query, related text, url, title,
	 * description) and prints generated bookmark tags for every line.
	 *
	 * NOTE(review): the input paths are hard-coded; consider taking them from
	 * {@code args}.
	 */
	public static void main(String args[]) {
		String stop_words = "/home/sergio/data/graph/stopwords.txt";
		String vocabulary = "/home/sergio/data/graph/complete_vocabulary.txt";
		String bing_results = "/home/sergio/data/graph/query_bing_intersection.txt";

		GenerateBookmarks g = new GenerateBookmarks(stop_words, vocabulary);

		FileInput in = new FileInput(bing_results);
		for (String line = in.readString(); line != null; line = in.readString()) {
			String[] t = line.toLowerCase().split("\t");
			String q = t[0];
			String r = t.length > 1 ? t[1] : "";
			String url = t.length > 2 ? t[2] : "";
			String title = t.length > 3 ? t[3] : "";
			String desc = t.length > 4 ? t[4] : "";
			g.exec(q, r, url, title, desc, 0, new Hashtable<String, Integer>());
		}
	}

}
