package miningMinds;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.StringTokenizer;
import java.util.TreeMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;

import modules.*;

import org.w3c.dom.Document;
import org.w3c.dom.NodeList;

import com.sun.org.apache.bcel.internal.generic.GETSTATIC;

import parameter.*;
import functions.*;


/**
 * @author kjysmu
 *
 */

public class MiningMindsEngine {

	// NOTE(review): neither field is read in this file — presumably consumed by
	// callers elsewhere in the project; confirm before removing.
	int levelOfCategory = 2;
	int topKCategory = 10;

	String default_snstype = "Facebook";

	// term -> IDF weight (Naver corpus) and wiki category -> ICF weight.
	Map<String, Double> NaverIDF;
	Map<String, Double> WikiICF;

	// NOTE(review): never assigned in this file — likely populated elsewhere or dead.
	Map<String, Double> simMatrix;
	Map<String, Double> TopicProp;

	Map<String, Map<String, Integer>> usersLabelResults;
	Map<String, Map<String, Double>> userCategoryResult;

	// category name -> (term -> TF-IDF weight), one model file per category.
	Map<String, Map<String, Double>> NaverTFIDF;
	// category name -> (wiki category -> weight).
	Map<String, Map<String, Double>> NaverWIKI;

	// "TopicN" -> (term -> TF-IDF weight) and topic id -> prior weight.
	Map<String, Map<String, Double>> LDA_topicMap;
	Map<String, Double> LDA_topicDist;

	// wiki-LDA topic name -> (wiki category -> weight).
	Map<String, Map<String, Double>> wikiLDA_topicMap;
	// category -> (topic -> weight): per-category topic distribution.
	Map<String, Map<String, Double>> LDA_categoryDistMap;

	// FIXME(review): mutable static state shared across all instances; `document`
	// is never used in this file — confirm before removing.
	static DocumentBuilderFactory factory;
	static DocumentBuilder builder;
	static Document document;
	Wikimap wikimap;

	/** Builds an engine for the default SNS type ("Facebook"). */
	public MiningMindsEngine() throws Exception {
		init(default_snstype);
	}

	/** Builds an engine for the given SNS type. */
	public MiningMindsEngine(String snstype) throws Exception {
		init(snstype);
	}

	/**
	 * Loads every model the engine needs from disk: Naver TF-IDF category
	 * models, wiki CF/CF-ICF category models, IDF/ICF tables, the Naver LDA
	 * topic model, the wiki LDA topics, and the per-category LDA topic
	 * distribution.
	 *
	 * @param snstype currently unused here; kept for interface compatibility
	 *                (the commented-out LabeledUser step consumed it).
	 * @throws Exception on any parser-setup or file-read failure.
	 */
	public void init(String snstype) throws Exception {
		factory = DocumentBuilderFactory.newInstance();
		builder = factory.newDocumentBuilder();
		wikimap = new Wikimap();

		//LabeledUser labeledUser = new LabeledUser();
		//labeledUser.SaveCategoryCount(snstype);

		NaverTFIDF = new HashMap<String, Map<String, Double>>();
		NaverWIKI = new HashMap<String, Map<String, Double>>();

		// ---- Naver TF-IDF category models: one file per category -------------
		for (File file : FileFunction.getListOfFiles(Path.CATEGORY_MODEL_TFIDF_PATH)) {
			NaverTFIDF.put(stripTxtExtension(file.getName()).trim(), FileFunction.readMapStrDou(file));
		}

		// ---- Wiki category models: variant selected by Exp.approach ----------
		List<File> fileListWIKI;
		if (Exp.approach.contains("icf")) {
			if (Exp.subCategoryWeight_Naver != 0.0) {
				// NOTE(review): hard-coded Windows separator "\\" — portability risk.
				fileListWIKI = FileFunction.getListOfFiles(
						Path.CATEGORY_MODEL_WIKI_CFICF_SUB_PATH
						+ String.format("%.1f", Exp.subCategoryWeight_Naver) + "\\");
			} else {
				fileListWIKI = FileFunction.getListOfFiles(Path.CATEGORY_MODEL_WIKI_CFICF_PATH);
			}
		} else {
			fileListWIKI = FileFunction.getListOfFiles(Path.CATEGORY_MODEL_WIKI_CF_PATH);
		}
		for (File file : fileListWIKI) {
			NaverWIKI.put(stripTxtExtension(file.getName()).trim(), FileFunction.readMapStrDou(file));
		}

		// ---- IDF / ICF tables ------------------------------------------------
		NaverIDF = FileFunction.readMapStrDou(Path.IDF_FILEPATH);
		WikiICF = FileFunction.readMapStrDou(Path.WIKI_ICF_FILEPATH);

		// ---- Naver LDA topic model -------------------------------------------
		// Each valid row: "<topicId>\t<prior>\t<term> (prob) <term> (prob) ..."
		LDA_topicMap = new HashMap<String, Map<String, Double>>();
		LDA_topicDist = new HashMap<String, Double>();
		// BUGFIX: the original closed the FileReader before the BufferedReader
		// and leaked both on exception; closing the wrapper in finally closes
		// the underlying stream too.
		BufferedReader br_lda = new BufferedReader(new FileReader(new File(Path.LDA_TOPIC_FILEPATH)));
		try {
			String line;
			while ((line = br_lda.readLine()) != null) {
				StringTokenizer token = new StringTokenizer(line, "\t");
				if (token.countTokens() != 3) continue; // skip malformed rows
				String topicName = token.nextToken();
				LDA_topicDist.put(topicName, Double.parseDouble(token.nextToken()));
				StringTokenizer tk = new StringTokenizer(token.nextToken().trim(), " ");
				Map<String, Double> tmap = new HashMap<String, Double>();
				while (tk.hasMoreTokens()) {
					String term = tk.nextToken();
					// probability token is wrapped in one-char delimiters, e.g. "(0.42)"
					String prob = tk.nextToken();
					tmap.put(term, Double.parseDouble(prob.substring(1, prob.length() - 1)));
				}
				LDA_topicMap.put("Topic" + topicName, getTFIDF(TermFunction.getNorm(tmap)));
			}
		} finally {
			br_lda.close();
		}

		// ---- Wiki LDA topics: one file per topic -----------------------------
		wikiLDA_topicMap = new HashMap<String, Map<String, Double>>();
		for (File tplist : FileFunction.getListOfFiles(Path.LDA_TOPIC_WIKI_PATH)) {
			wikiLDA_topicMap.put(stripTxtExtension(tplist.getName()), FileFunction.readMapStrDou(tplist));
		}

		// ---- Per-category LDA topic distribution -----------------------------
		// Header row starts with "NaverLDA" and names the topic columns; every
		// other row is "<category>\t<count per topic column>...".
		LDA_categoryDistMap = new HashMap<String, Map<String, Double>>();
		BufferedReader br_lda_c = new BufferedReader(new FileReader(new File(Path.CATEGORY_MODEL_LDA_PATH)));
		try {
			Map<Integer, String> indexMap = new HashMap<Integer, String>();
			String line;
			while ((line = br_lda_c.readLine()) != null) {
				String tokens[] = line.split("\t");
				if (tokens[0].equals("NaverLDA")) {
					for (int ct = 1; ct < tokens.length; ct++) {
						indexMap.put(ct, tokens[ct]);
					}
				} else {
					Map<String, Double> countmap = new HashMap<String, Double>();
					for (int ct = 1; ct < tokens.length; ct++) {
						countmap.put("Topic" + indexMap.get(ct), Double.parseDouble(tokens[ct]));
					}
					LDA_categoryDistMap.put(tokens[0], countmap);
				}
			}
		} finally {
			br_lda_c.close();
		}
	} // End of Initialization

	/**
	 * Strips a trailing ".txt" extension from a model file name.
	 * BUGFIX: the original used {@code replaceAll(".txt", "")}, where the
	 * unescaped dot matched ANY character anywhere in the name.
	 */
	private static String stripTxtExtension(String fileName) {
		return fileName.replaceAll("\\.txt$", "");
	}

	/** @return the loaded term -> IDF table. */
	public Map<String, Double> getNaverIDF() {
		return NaverIDF;
	}

	/** @return the TF-IDF model for the named category, or null if absent. */
	public Map<String, Double> getNaverTFIDF(String name) {
		return NaverTFIDF.get(name);
	}

	/** Weights the given term counts by the Naver IDF table. */
	public Map<String, Double> getTFIDF(Map<String, Double> map) {
		return DocumentFunction.GetTFIDF(map, NaverIDF);
	}

	/** Weights the given category counts by the Wikipedia ICF table. */
	public Map<String, Double> getWikiTCICF(Map<String, Double> map) {
		return DocumentFunction.GetTFIDF(map, WikiICF);
	}

	// NaN is the only value not equal to itself (IEEE 754).
	boolean isNaN(double x) {return x != x;}

	/** Logarithm of x in the given base. */
	public static double logB(double x, double base) {
		return Math.log(x) / Math.log(base);
	}

	/**
	 * Scores every Naver category against the user's aggregate term counts
	 * using cosine similarity (optionally on the TF-IDF-weighted vector).
	 *
	 * @return category name -> similarity, unsorted and unthresholded.
	 */
	public Map<String, Double> getCategoryList(SNSUserBean userBean) throws Exception {
		Map<String, Double> termCounts = userBean.getTotalTermCount();
		// PERF: the user vector is loop-invariant; the original recomputed the
		// TF-IDF weighting once per category (assumes GetTFIDF is pure).
		Map<String, Double> userVector = Exp.btfidf_user ? getTFIDF(termCounts) : termCounts;
		Map<String, Double> similarities = new HashMap<String, Double>();
		for (Map.Entry<String, Map<String, Double>> entry : NaverTFIDF.entrySet()) {
			similarities.put(entry.getKey(), getCosineSimilarity(userVector, entry.getValue()));
		}
		return similarities;
	}

	/**
	 * Classifies one message into categories. The scoring strategy is picked
	 * from {@code Exp.approach}: plain TF-IDF, TF-IDF via LDA topic space, or
	 * Wikipedia-category voting (optionally via wiki-LDA topic space). The raw
	 * scores are sorted, thresholded by {@code Exp.msg_threshold} (absolute)
	 * and {@code Exp.msg_threshold2} (relative to the score sum), and capped
	 * at {@code Exp.msg_maxcategory} entries (0 = no cap).
	 *
	 * @return top category name -> similarity; empty when nothing scores.
	 */
	public Map<String, Double> getCategoryList(SNSUserMsgBean userMsgBean) throws Exception {

		Map<String, Double> similarities = new HashMap<String, Double>();
		Map<String, Double> topsimilarities = new HashMap<String, Double>();
		Map<String, Double> termCounts = userMsgBean.getTermCountMap();

		if (Exp.approach.contains("tfidf")) {
			if (Exp.approach.contains("lda")) {
				// Project the message into topic space, then compare the topic
				// feature vector against each category's topic distribution.
				// PERF: the message TF-IDF vector was recomputed per topic in
				// the original; it is loop-invariant.
				Map<String, Double> msgVector = getTFIDF(TermFunction.getNorm(termCounts));
				Map<String, Double> map_feature = new HashMap<String, Double>();
				for (Map.Entry<String, Map<String, Double>> topic : LDA_topicMap.entrySet()) {
					map_feature.put(topic.getKey(), getCosineSimilarity(msgVector, topic.getValue()));
				}
				for (Map.Entry<String, Map<String, Double>> dist : LDA_categoryDistMap.entrySet()) {
					similarities.put(dist.getKey(), getCosineSimilarity(map_feature, dist.getValue()));
				}
			} else {
				// PERF: hoisted the loop-invariant user vector (see above).
				Map<String, Double> userVector = Exp.btfidf_user ? getTFIDF(termCounts) : termCounts;
				for (Map.Entry<String, Map<String, Double>> entry : NaverTFIDF.entrySet()) {
					similarities.put(entry.getKey(), getCosineSimilarity(userVector, entry.getValue()));
				}
			}
		} else if (Exp.approach.contains("wiki")) {
			Map<String, Double> wikiCategory = buildWikiCategoryMap(userMsgBean, termCounts);
			userMsgBean.setWikiCategoryMap(wikiCategory);

			if (Exp.approach.contains("lda")) {
				Map<String, Double> map_feature = new HashMap<String, Double>();
				for (Map.Entry<String, Map<String, Double>> topic : wikiLDA_topicMap.entrySet()) {
					map_feature.put(topic.getKey(), getCosineSimilarity(wikiCategory, topic.getValue()));
				}
				for (Map.Entry<String, Map<String, Double>> dist : LDA_categoryDistMap.entrySet()) {
					similarities.put(dist.getKey(), getCosineSimilarity(map_feature, dist.getValue()));
				}
			} else {
				for (Map.Entry<String, Map<String, Double>> entry : NaverWIKI.entrySet()) {
					similarities.put(entry.getKey(), getWikiSimilarity(wikiCategory, entry.getValue()));
				}
			}
		} // End of If-Wiki

		// Sort by score (DoubleValueComparator orders the keys by their values).
		DoubleValueComparator bvc = new DoubleValueComparator(similarities);
		TreeMap<String, Double> tMap = new TreeMap<String, Double>(bvc);
		tMap.putAll(similarities);
		userMsgBean.setRecomTotalCategoryMap(similarities);

		double totalsim = 0.0;
		for (Map.Entry<String, Double> similarity : tMap.entrySet()) {
			Double v = similarity.getValue();
			// BUGFIX: the original tested `v == 0` before `v == null`; a null
			// value would have thrown NPE on unboxing before the null check.
			if (v != null && v != 0 && !isNaN(v)) {
				totalsim += v;
			}
		}
		if (totalsim == 0 || isNaN(totalsim)) {
			topsimilarities.clear();
			return topsimilarities;
		}

		/*--------------------------Threshold Modeling-----------------------------------------*/
		Iterator<Map.Entry<String, Double>> iter = tMap.entrySet().iterator();
		while (iter.hasNext()) {
			Map.Entry<String, Double> entry = iter.next();
			Double score = entry.getValue();
			// BUGFIX: null check moved before the unboxing comparisons (see above).
			if (score == null || score == 0 || isNaN(score)) {
				iter.remove();
			} else if (score < Exp.msg_threshold || score / totalsim < Exp.msg_threshold2) {
				iter.remove();
			}
		}

		if (similarities.isEmpty()) {
			topsimilarities.clear();
		} else {
			int count = 0;
			for (Map.Entry<String, Double> similarity : tMap.entrySet()) {
				topsimilarities.put(similarity.getKey(), similarity.getValue());
				count++;
				// msg_maxcategory == 0 means "no cap on returned categories".
				if (Exp.msg_maxcategory != 0 && count >= Exp.msg_maxcategory) {
					break;
				}
			}
		}
		return topsimilarities;
	}

	/**
	 * Maps a message's terms onto Wikipedia categories. For each term it
	 * resolves redirects, looks up the Naver IDF, records per-term metadata on
	 * the message bean, and — for unambiguous terms with known categories —
	 * adds one vote per category (IDF-weighted when Exp.isWiki_NaverIDF).
	 * Homonymous terms only record their candidate senses; they cast no votes.
	 *
	 * @return wiki category -> accumulated vote weight.
	 */
	private Map<String, Double> buildWikiCategoryMap(SNSUserMsgBean userMsgBean, Map<String, Double> termCounts) {
		Map<String, Double> wikiCategory = new HashMap<String, Double>();
		for (Map.Entry<String, Double> termcount : termCounts.entrySet()) {
			String term = termcount.getKey();
			SNSTermBean termBean = new SNSTermBean();
			termBean.setTerm(term);
			termBean.setTF(termcount.getValue());

			/*------------------------------------ Term IDF  ---------------------------------------*/
			// The redirect target's IDF wins when both surface form and target
			// have one; missing entries fall back to 0.0.
			String wikiTerm = term;
			Double term_idf = 0.0;
			if (wikimap.isRedirect(term)) {
				String term_rd = wikimap.getRedirect(term);
				termBean.setRedirectTerm(term_rd);
				if (NaverIDF.get(term) != null) term_idf = NaverIDF.get(term);
				if (NaverIDF.get(term_rd) != null) term_idf = NaverIDF.get(term_rd);
				wikiTerm = term_rd;
			} else if (NaverIDF.get(term) != null) {
				term_idf = NaverIDF.get(term);
			}
			termBean.setIDF(term_idf);

			/*------------------------------------- Homonym Model ----------------------------------*/
			if (wikimap.isHomonym(wikiTerm)) {
				// Ambiguous: record each normalized (and redirect-resolved)
				// candidate sense; disambiguation itself is not implemented.
				for (String homonymTerm : wikimap.getHomonym(wikiTerm)) {
					SNSTermBean homonymTermBean = new SNSTermBean();
					homonymTerm = wikimap.termNorm(homonymTerm);
					homonymTermBean.setTerm(homonymTerm);
					if (wikimap.isRedirect(homonymTerm)) {
						homonymTerm = wikimap.getRedirect(homonymTerm);
						homonymTermBean.setRedirectTerm(homonymTerm);
					}
					termBean.addHomonymTermList(homonymTermBean);
				}
			} else if (wikimap.hasCategory(wikiTerm)) {
				/*---------------------------------WikiCategory Model------------------------------------*/
				// The four near-identical branches of the original collapse to
				// one accumulation with a single weight choice.
				double weight = Exp.isWiki_NaverIDF ? term_idf : 1.0;
				for (String category : wikimap.getCategory(wikiTerm)) {
					Double old = wikiCategory.get(category);
					wikiCategory.put(category, old == null ? weight : old + weight);
					termBean.addWikiCategoryMap(category, weight);
				}
			}

			userMsgBean.addTermList(termBean);
		} // End of For-TermCount
		return wikiCategory;
	}

	/**
	 * Cosine similarity between two count maps after L-normalization
	 * (normalization semantics live in TermFunction.getNorm).
	 * Returns 0.0 when either map is empty.
	 */
	public double getCosineSimilarity(Map<String, Double> map1, Map<String, Double> map2) {
		if (map1.isEmpty() || map2.isEmpty()) {
			return 0.0;
		}
		Map<String, Double> freq1 = TermFunction.getNorm(map1);
		Map<String, Double> freq2 = TermFunction.getNorm(map2);
		return DocumentFunction.ComputeCosineSimilarity(freq1, freq2);
	}

	/**
	 * Wiki-category similarity: optionally blends the user's category vector
	 * with its expanded subcategories (weight Exp.subCategoryWeight_User),
	 * applies ICF weighting to side 1 only, then takes cosine similarity.
	 * Returns 0.0 when either map is empty. (In progress — wiki-sim method.)
	 */
	public double getWikiSimilarity(Map<String, Double> map1, Map<String, Double> map2) {
		if (map1.isEmpty() || map2.isEmpty()) {
			return 0.0;
		}
		Map<String, Double> categoryMap1 = TermFunction.getNorm(map1);
		Map<String, Double> categoryMap2 = TermFunction.getNorm(map2);
		Map<String, Double> totalCategoryMap1;
		if (Exp.subCategoryWeight_User != 0.0) {
			Map<String, Double> subCategoryMap1 = getSubCategoryMap(categoryMap1);
			totalCategoryMap1 = TermFunction.CombineCountsWeight(
					categoryMap1, subCategoryMap1,
					1.0 - Exp.subCategoryWeight_User, Exp.subCategoryWeight_User);
		} else {
			totalCategoryMap1 = categoryMap1;
		}
		// NOTE(review): ICF weighting is applied to map1's side only — confirm intended.
		return DocumentFunction.ComputeCosineSimilarity(getWikiTCICF(totalCategoryMap1), categoryMap2);
	}

	/**
	 * Expands every category in map1 to its wiki subcategories, each carrying
	 * its parent's weight, and returns the normalized result.
	 * NOTE(review): a subcategory reachable from several parents keeps only the
	 * last parent's weight (put overwrites rather than sums) — confirm intended.
	 */
	public Map<String, Double> getSubCategoryMap(Map<String, Double> map1) {
		Map<String, Double> subCategoryMap = new HashMap<String, Double>();
		for (Entry<String, Double> entry : map1.entrySet()) {
			for (String subCategory : wikimap.getSubCategory(entry.getKey())) {
				subCategoryMap.put(subCategory, entry.getValue());
			}
		}
		return TermFunction.getNorm(subCategoryMap);
	}

}
