package zunge.twitterdel.typostat;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;

import zunge.tools.StringDistance;

/**
 * Heuristics for deciding whether one tweet is a typo-corrected
 * version (isTypo) or a repost (isRepost) of another tweet.
 *
 * <p>Not thread-safe with respect to the mutable threshold fields.
 */
public class TypoCheck {

	/** Edit distances strictly below this count as a typo correction. */
	public static int str_dist_threshold = 5;
	/** Cosine similarities strictly above this count as a repost. */
	public static double cosine_sim_threshold = 0.7;

	/**
	 * Decides whether {@code s2} is a typo-corrected version of {@code s1}.
	 *
	 * @param s1 the earlier (deleted) tweet text
	 * @param s2 the later tweet text
	 * @return true if s1 is contained in s2 (incomplete tweet completed),
	 *         or the edit distance — raw or after alphabetical token
	 *         reordering — is below {@link #str_dist_threshold}
	 */
	public static boolean isTypo(String s1, String s2){

		// Old string contained in the new string: the old tweet was incomplete.
		if(s2.contains(s1)){return true;}

		// Plain edit distance between the raw strings.
		int dist = StringDistance.distance(s1, s2);
		if(dist < str_dist_threshold){return true;}

		// Edit distance after sorting tokens alphabetically; catches
		// rewrites that merely reorder the same words.
		int distReordered = StringDistance.distance(alphabetReorder(s1), alphabetReorder(s2));
		return distReordered < str_dist_threshold;
	}

	/**
	 * Tokenizes {@code s} and rebuilds it with the tokens sorted
	 * alphabetically. The result carries a single leading space before the
	 * first token (kept for compatibility with the original output format).
	 */
	static String alphabetReorder(String s){
		List<String> tokens = tokenize(s);
		Collections.sort(tokens);
		// StringBuilder instead of += concatenation in a loop.
		StringBuilder sb = new StringBuilder();
		for(String token : tokens){
			sb.append(' ').append(token);
		}
		return sb.toString();
	}



	/**
	 * Decides whether {@code s2} is a repost of {@code s1} by comparing the
	 * cosine similarity of their term-frequency vectors against
	 * {@link #cosine_sim_threshold}. (Leftover debug printlns removed.)
	 */
	public static boolean isRepost(String s1, String s2){
		return calcCosineSim(s1, s2) > cosine_sim_threshold;
	}

	/**
	 * Cosine similarity of the term-frequency vectors of the two strings.
	 *
	 * @return similarity in [0,1], or -1 when either string yields no tokens
	 *         (the original code produced NaN via 0/0 here; both values fail
	 *         the {@code > threshold} test in {@link #isRepost}, so callers
	 *         see identical decisions)
	 */
	static double calcCosineSim(String s1, String s2){
		List<String> tokens1 = tokenize(s1);
		List<String> tokens2 = tokenize(s2);
		// tokenize() never returns null; the real degenerate case is "no tokens".
		if(tokens1.isEmpty() || tokens2.isEmpty()){return -1;}

		// Assign each distinct token a vector index.
		Map<String,Integer> tokenSet = new HashMap<String,Integer>();
		for(String t : tokens1){addToSet(t, tokenSet);}
		for(String t : tokens2){addToSet(t, tokenSet);}

		// Term-frequency vectors (int[] elements are zero-initialized).
		int[] vec1 = new int[tokenSet.size()];
		int[] vec2 = new int[tokenSet.size()];
		for(String t : tokens1){vec1[tokenSet.get(t)] += 1;}
		for(String t : tokens2){vec2[tokenSet.get(t)] += 1;}

		double dot = 0;
		double norm1 = 0;
		double norm2 = 0;
		for(int i = 0; i < tokenSet.size(); ++i){
			dot   += vec1[i] * vec2[i];
			norm1 += vec1[i] * vec1[i];
			norm2 += vec2[i] * vec2[i];
		}
		return dot / Math.sqrt(norm1 * norm2);
	}

	/**
	 * Registers {@code s} in the token-to-index map, assigning it the next
	 * free vector index. (Parameter widened from Hashtable to Map — every
	 * Hashtable is a Map, so existing callers are unaffected.)
	 */
	static void addToSet(String s, Map<String,Integer> tokenSet){
		if(!tokenSet.containsKey(s)){
			tokenSet.put(s, tokenSet.size());
		}
	}



	/**
	 * Splits a tweet into whitespace-delimited tokens, normalizing Twitter
	 * entities: trailing non-alphanumeric characters are stripped, '@' and
	 * '#' prefixes are removed, a leading "RT" is removed from longer
	 * tokens, and URLs collapse to the single token "HTTP".
	 *
	 * @return the token list; never null (empty list for empty input)
	 */
	static List<String> tokenize(String s){
		// Strip non-alphanumeric characters from the end of the string.
		// Scan for the cut point first instead of repeated substring() calls.
		int end = s.length();
		while(end > 0 && !Character.isLetterOrDigit(s.charAt(end - 1))){
			--end;
		}
		s = s.substring(0, end);
		if(s.isEmpty()){return new ArrayList<String>();}

		// Tokenize on single spaces (runs of spaces yield empty fragments,
		// filtered below).
		String[] frags = s.split(" ");
		List<String> tokens = new ArrayList<String>();
		for(String c : frags){
			// Filter entities.
			if(c.isEmpty()){
				continue;
			}else if(c.length() > 1 && c.charAt(0) == '@'){
				// @mention: keep the bare user name.
				c = c.substring(1);
			}else if(c.length() > 1 && c.charAt(0) == '#'
					&& Character.isLetterOrDigit(c.charAt(1))){
				// #hashtag: keep the bare tag.
				c = c.substring(1);
			}else if(c.length() > 2 && c.startsWith("RT")){
				// Retweet marker. BUG FIX: was `substring(0,2)=="RT"`, a
				// reference comparison against a fresh substring — never true.
				c = c.substring(2);
			}else if(c.length() > 7 && c.startsWith("http://")){
				// URL. BUG FIX: was `substring(0,7)=="http:////"` — a
				// reference comparison, and the 7-char substring could never
				// equal the 9-char literal anyway. Collapse URLs to "HTTP"
				// so differing links don't inflate the distance.
				c = "HTTP";
			}

			if(!c.isEmpty()){
				tokens.add(c);
			}
		}

		return tokens;
	}

}
