package edu.cn.nlsde.tmfst.EVA;

import edu.cn.nlsde.Utils.IOUtil;
//import gnu.trove.TObjectIntHashMap;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.util.*;
import java.util.Map.Entry;

public class ExtrinsicCohrenceScore {

//    public String rootPath = "G:\\我的文件\\资料\\数据\\wikipedia_150w.txt";
//    HashSet<String> wordSet = new HashSet<String>();
//    TObjectIntHashMap wordPairs = new TObjectIntHashMap();
//    ArrayList<ArrayList<ArrayList<String>>> topic_words = new ArrayList<ArrayList<ArrayList<String>>>();
//    int Wiki_doc_count = 0;
//
//    public static void print_result(String path) throws IOException {
//        BufferedReader reader = IOUtil.getReader(path);
//        String line = null, line2 = null;
//        while ((line = reader.readLine()) != null) {
//            line2 = line;
//        }
//        System.out.println(line2);
//        reader.close();
//    }
//
//    public static void store_topics_to_one_file() throws IOException {
//        String[] dirStrings = {
//                "AliasPAM4ST_1000_100_0.001_0.1_0.01"
////				"PAM4ST_1500_100_0.1_0.1_0.01",
////				"PAM4ST_100_100_0.1_0.1_0.01",
////				"PAM4ST_500_100_0.1_0.1_0.01",
////				"PAM4ST_1000_100_0.1_0.1_0.01",
////				"PAM4ST_2000_100_0.1_0.1_0.01",
////				"SparsePAM4ST_V3_1500_100_0.1_1.0E-7_0.1_0.01",
////				"SparsePAM4ST_V3_100_100_0.1_1.0E-7_0.1_0.01",
////				"SparsePAM4ST_V3_500_100_0.1_1.0E-7_0.1_0.01",
////				"SparsePAM4ST_V3_1000_100_0.1_1.0E-7_0.1_0.01",
////				"SparsePAM4ST_V3_2000_100_0.1_1.0E-7_0.1_0.01",
////				"SP_SATM_1000_100_0.1_0.01",
////				"LDA-0.1-0.01-100",
////				"unigram",
////				"BTM",
////				"DsparseTM_2000"
//        };
//        String[] dataNameList = {
////				"news",
//                "dblp-6",
////				"new-tweet"
//        };
//
////		BufferedWriter Awriter1 = Commons.IOUtil.getWriter("data4/All_Topics_10.txt");
////		BufferedWriter Awriter2 = Commons.IOUtil.getWriter("data4/All_Topics_15.txt");
//
//        for (String dataName : dataNameList) {
//            ExtrinsicCohrenceScore kkCohrenceScore = new ExtrinsicCohrenceScore();
//            String dataRoot = "data4/" + dataName + "/";
//            for (int me = 0; me < 1; me++) {
//                String methodRoot = dataRoot + dirStrings[me] + "/";
//                for (int i = 0; i < 5; i++) {
//                    String instanceRoot = methodRoot + i + "/";
//                    kkCohrenceScore.readTopic_words(instanceRoot + "model-final.phi", instanceRoot + "wordmap.txt");
//                }
//            }
//            BufferedWriter writer1 = IOUtil.getWriter(dataRoot + dataName + "_alias_Topics_10.txt");
////			BufferedWriter writer2 = Commons.IOUtil.getWriter(dataRoot+dataName+"_BTM_Topics_15.txt");
//            for (int me = 0; me < 1; me++) {
//                for (int i = 0; i < 5; i++) {
//                    kkCohrenceScore.print_topics(writer1, 5 * me + i, 10);
////					kkCohrenceScore.print_topics(writer2,5*me+i,15);
//                }
//            }
//            writer1.close();
////			writer2.close();
//        }
//    }
//
//    public static void main(String[] args) throws IOException {
//        store_topics_to_one_file();
////		String[] dirStrings = {"PAM4ST(0.01_innerstep_1)","SP_SATM","LDA-0.01-0.01","unigram","wntm","DsparseTM"};
////		String[] phiStrings={"phi.txt","phi.txt","model-final.phi","mu_phi_1.txt","model-final.phi","model-final.phi"};
////		String[] wordMapStrings={"wordmap.txt","wordmap.txt","wordmap.txt","mu_wordmap.txt","wordmap.txt","wordmap.txt"};
////		ExtrinsicCohrenceScore kkCohrenceScore = new ExtrinsicCohrenceScore();
////
////		for(int me=0;me<6;me++)
////		{
////			String root = "data2/news/"+dirStrings[me]+"/";
////			for(int i=0;i<5;i++){
////				String rootPath = root+i+"/";
////				kkCohrenceScore.readTopic_words(rootPath+phiStrings[me],rootPath+wordMapStrings[me]);
////			}
////		}
////		//kkCohrenceScore.set_word_pairs();
////		///////////////////////////////
////		//kkCohrenceScore.read_wiki("G:\\我的文件\\资料\\数据\\wikipedia_150w.txt","data2/news/dict_wiki_150w.txt");
////		//kkCohrenceScore.get_word_pairs_from_dict("data2/news/dict_wiki_380w.txt");
////		//////////////////////////////////////////
////		BufferedWriter writer1 = Commons.IOUtil.getWriter("data2/news/TopicsAllInOne_10.txt");
////		BufferedWriter writer2 = Commons.IOUtil.getWriter("data2/news/TopicsAllInOne_15.txt");
////		for(int me=0;me<6;me++)
////		{
////			String root = "data2/news/"+dirStrings[me]+"/";
////			System.out.println("topword=10");
////			for(int i=0;i<5;i++){
////				String rootPath = root+i+"/";
////				//kkCohrenceScore.calculate_coherence(5*me+i,10);
//////				print_result(rootPath+".cohrence.10");
////			}
////			System.out.println("topword=15");
////			for(int i=0;i<5;i++){
////				String rootPath = root+i+"/";
////				//kkCohrenceScore.calculate_coherence(5*me+i,15);
//////				print_result(rootPath+".cohrence.15");
////			}
////		}
////		writer1.close();
////		writer2.close();
//
//
//    }
//
//    public double log(double value, double base) {
//        return Math.log(value) / Math.log(base);
//    }
//
//    private void set_word_pairs() {
//        String myW1, myW2;
//        for (String word1 : wordSet) {
//            for (String word2 : wordSet) {
//                myW1 = word1;
//                myW2 = word2;
//                if (word1.compareTo(word2) > 0) {
//                    myW1 = word2;
//                    myW2 = word1;
//                }
//                wordPairs.put(myW1 + "|" + myW2, 0);
//            }
//        }
//    }
//
//    private void read_wiki(String rootPath, String store_path) throws IOException {
//        this.rootPath = rootPath;
//        long starttime = System.currentTimeMillis();
//        System.out.println("counts:" + wordPairs.size());
//        BufferedReader reader = IOUtil.getReader(rootPath);
//        String line = null, myW1, myW2, wpitem;
//        int lengthAvg = 0;
//        while ((line = reader.readLine()) != null) {
//            Wiki_doc_count++;
//            if (Wiki_doc_count % 1000 == 0) {
//                System.out.println(Wiki_doc_count + "\t" + ((System.currentTimeMillis() - starttime) / 1000) + "\t" + (lengthAvg / 1000));
//                lengthAvg = 0;
//            }
//            HashSet<String> oneLineWordMap = new HashSet<String>();
//            ArrayList<String> oneLineWordList = new ArrayList<String>();
//            String[] lines = line.split("\\s+");
//            for (String item : lines) {
//                if (oneLineWordMap.contains(item)) {
//                    continue;
//                }
//                oneLineWordMap.add(item);
//                if (wordSet.contains(item)) {
//                    oneLineWordList.add(item);
//                }
//
//            }
//            int theLength = oneLineWordList.size();
//            lengthAvg += theLength;
//            //System.out.println(theLength);
//            for (int i1 = 0; i1 < theLength; i1++) {
//                for (int i2 = i1; i2 < theLength; i2++) {
//                    myW1 = oneLineWordList.get(i1);
//                    myW2 = oneLineWordList.get(i2);
//                    if (myW1.compareTo(myW2) > 0) {
//                        myW1 = oneLineWordList.get(i2);
//                        myW2 = oneLineWordList.get(i1);
//                    }
//                    wpitem = myW1 + "|" + myW2;
//                    Integer theStoreOne = wordPairs.get(wpitem);
//                    wordPairs.put(wpitem, theStoreOne + 1);
//                }
//            }
//        }
//
//        reader.close();
//        System.out.println("counts:" + Wiki_doc_count);
//
//        BufferedWriter writer = IOUtil.getWriter(store_path);
//        writer.write(Wiki_doc_count);
//        for (Object ones : wordPairs.keys()) {
//            writer.write(ones + " " + wordPairs.get(ones) + "\n");
//        }
//        writer.close();
//
//    }
//
//    public HashMap<Integer, String> readWordMap(String wordMap_path) throws IOException {
//        HashMap<Integer, String> result = new HashMap<Integer, String>();
//        BufferedReader reader = IOUtil.getReader(wordMap_path);
//        String line = null;
//        while ((line = reader.readLine()) != null) {
//            String[] lines = line.split("\\s+");
//            if (lines.length != 2) {
//                continue;
//            }
//            result.put(Integer.parseInt(lines[1]), lines[0]);
//        }
//        reader.close();
//        return result;
//    }
//
//    public void readTopic_words(String phi_path, String wordMap_path) throws IOException {
//        HashMap<Integer, String> wordMap = readWordMap(wordMap_path);
//        BufferedReader reader = IOUtil.getReader(phi_path);
//        String line = null;
//        ArrayList<ArrayList<String>> theOneSet = new ArrayList<ArrayList<String>>();
//
//        while ((line = reader.readLine()) != null) {
//            String[] lines = line.split("\\s+");
//            HashMap<Integer, Double> lineDuoble = new HashMap<Integer, Double>();
//            for (int i = 0; i < lines.length; i++) {
//                lineDuoble.put(i, Double.parseDouble(lines[i]));
//            }
//            ArrayList<Entry<Integer, Double>> tmpentryList = new ArrayList<Entry<Integer, Double>>(lineDuoble.entrySet());
//            Collections.sort(tmpentryList, new Comparator<Entry<Integer, Double>>() {
//                public int compare(Entry<Integer, Double> o1, Entry<Integer, Double> o2) {
//                    return (o2.getValue().compareTo(o1.getValue()));
//                }
//            });
//            ArrayList<String> oneLineResult = new ArrayList<String>();
//            for (int i = 0; i < 15; i++) {
//                String thestemword = wordMap.get(tmpentryList.get(i).getKey());
//                oneLineResult.add(thestemword);
//                wordSet.add(thestemword);
//            }
//            theOneSet.add(oneLineResult);
//        }
//        topic_words.add(theOneSet);
//        reader.close();
//    }
//
//    public void calculate_coherence(int corpusNum, int topN) {
//        double t_score = 0.0;
//        for (int k = 0; k != topic_words.get(corpusNum).size(); k++) {
//            //System.out.println("\n");
//            double score = 0.0;
//            for (int i = 1; i != topN; i++) {
//                //System.out.println(topic_words.get(corpusNum).get(k).get(i));
//                for (int j = 0; j != i; j++) {
//                    score += this.getCoherenceScore(topic_words.get(corpusNum).get(k).get(i),
//                            topic_words.get(corpusNum).get(k).get(j));
//                }
//            }
//            t_score += 2.0 / (topN * (topN - 1)) * score;
////			System.out.println("topic " + k + ": " +score);
//            //System.out.println(score);
//        }
//        System.out.println(t_score / topic_words.get(corpusNum).size());
//    }
//
//    private double getCoherenceScore(String word1, String word2) {
//        double word1pp = wordPairs.get(word1 + "|" + word1) + 0.01;
//        double word2pp = wordPairs.get(word2 + "|" + word2) + 0.01;
//        double word12 = 0;
//        if (wordPairs.containsKey(word1 + "|" + word2)) {
//            word12 = wordPairs.get(word1 + "|" + word2) + 0.01;
//        } else {
//            word12 = wordPairs.get(word2 + "|" + word1) + 0.01;
//        }
//        return log(word12 * Wiki_doc_count / (word1pp * word2pp), 10);
//    }
//
//    public void get_word_pairs_from_dict(String dictPath) throws IOException {
//        BufferedReader reader = IOUtil.getReader(dictPath);
//        String line = null;
//        ArrayList<ArrayList<String>> theOneSet = new ArrayList<ArrayList<String>>();
//        Wiki_doc_count = Integer.parseInt(reader.readLine());
//        while ((line = reader.readLine()) != null) {
//            String[] lines = line.split("\\s+");
//            wordPairs.put(lines[0], Integer.parseInt(lines[1]));
//        }
//        reader.close();
//    }
//
//    public void print_topics(BufferedWriter writer, int corpusNum, int topN) throws IOException {
//        String str = "";
//        for (int k = 0; k != topic_words.get(corpusNum).size(); k++) {
//            str = "";
//            for (int i = 0; i != topN; i++) {
//                str += topic_words.get(corpusNum).get(k).get(i) + " ";
//            }
//            writer.write(str.trim() + "\n");
//        }
//
//    }
}
