package wntm;

import java.io.*;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class InferenceTopicsForOrgDocs {

    /** Per-word topic distributions loaded from a model: word -> theta vector (one weight per topic). */
    private final Map<String, double[]> word2theta = new HashMap<>();

    /**
     * Batch driver: for every (dataset, method, topic count, run) combination, infers
     * document-level topic distributions from the word-level model output files.
     */
    public static void main(String[] args) {
        // Other datasets used in experiments: new-tweet, dblp, yahooQA, 20newsgroup, stackoverflow.
        String[] datanames = {
                "news",
        };
        // Other methods: GSLDA, GSLDA_without_background_* variants.
        String[] methods = {
                "LDA_0.1_0.01_",
        };
        for (String dataname : datanames) {
            String root = "data4/" + dataname + "/wntm_noWeight/";
            for (String method : methods) {
                // Topic-count grid: currently the single value 20; the step keeps it extensible.
                for (int topicNum = 20; topicNum < 30; topicNum += 20) {
                    for (int run = 0; run < 2; run++) {
                        String kroot = root + method + topicNum + "/" + run + "/";
                        // Fresh instance per run so one model's word2theta never leaks into the next.
                        InferenceTopicsForOrgDocs inference = new InferenceTopicsForOrgDocs();
                        inference.InferenceTopics(root + dataname + ".words",
                                kroot + "model-final.theta",
                                root + dataname + ".data", kroot + "model.theta");
                    }
                }
            }
        }
    }

    /**
     * Loads the vocabulary and the per-word topic distributions in parallel: line i of
     * {@code wordsFile} names the word whose theta vector is line i of {@code thetaFile}.
     * Populates {@link #word2theta}.
     *
     * @param wordsFile path to the .words file; column 0 of each line is the word
     *                  (extra columns, e.g. a frequency, are ignored)
     * @param thetaFile path to the .theta file; one whitespace-separated vector per line
     * @return the number of topics (column count of the theta file), or -1 if reading failed
     */
    private int loadWordsAndTheta(String wordsFile, String thetaFile) {
        int numberOfTopics = -1;
        // try-with-resources: the original leaked both readers when an IOException
        // interrupted the loop (close() was only reached on the success path).
        try (BufferedReader wordsReader = getReader(wordsFile, "utf-8");
             BufferedReader thetaReader = getReader(thetaFile, "utf-8")) {
            String wordsLine = wordsReader.readLine();
            String thetaLine = thetaReader.readLine();
            numberOfTopics = thetaLine.trim().split("\\s+").length;
            while (wordsLine != null) {
                if (thetaLine == null) {
                    // Fatal data mismatch: exit non-zero (the original exited 0 = success).
                    System.err.println("# of lines in .words file does not match the # in .theta file!");
                    System.exit(1);
                }
                String word = wordsLine.split("\\s+")[0].trim();
                if (word2theta.containsKey(word)) {
                    System.err.println("Duplicate word exists in .words file!");
                    System.exit(1);
                }

                String[] thetaValsInStr = thetaLine.trim().split("\\s+");
                if (thetaValsInStr.length != numberOfTopics) {
                    System.err.println("Wrong number of topics in .theta file!");
                    System.exit(1);
                }

                double[] theta = new double[numberOfTopics];
                for (int i = 0; i < numberOfTopics; i++) {
                    // parseDouble avoids the needless boxing of Double.valueOf.
                    theta[i] = Double.parseDouble(thetaValsInStr[i]);
                }
                word2theta.put(word, theta);

                wordsLine = wordsReader.readLine();
                thetaLine = thetaReader.readLine();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        return numberOfTopics;
    }

    /**
     * Infers a topic distribution for each original document by averaging the theta
     * vectors of its known words, and writes one space-separated vector per document
     * (one output line per input line) to {@code docsThetaFile}.
     *
     * @param wordsFile      vocabulary file, parallel to {@code wordsThetaFile}
     * @param wordsThetaFile per-word theta file produced by the topic model
     * @param orgDocsFile    original documents, one whitespace-tokenized document per line
     * @param docsThetaFile  output path for the per-document theta vectors
     */
    public void InferenceTopics(String wordsFile, String wordsThetaFile, String orgDocsFile, String docsThetaFile) {
        int numberOfTopics = loadWordsAndTheta(wordsFile, wordsThetaFile);
        if (numberOfTopics < 0) {
            // Loading failed (already reported); the original went on to throw
            // NegativeArraySizeException here.
            return;
        }

        try (BufferedReader orgDocsReader = getReader(orgDocsFile, "utf-8");
             BufferedWriter docsThetaWriter = getWriter(docsThetaFile, "utf-8")) {
            for (String orgDoc = orgDocsReader.readLine(); orgDoc != null; orgDoc = orgDocsReader.readLine()) {
                double[] orgTheta = inferDocTheta(Arrays.asList(orgDoc.trim().split("\\s+")), numberOfTopics);
                StringBuilder sb = new StringBuilder();
                for (int i = 0; i < numberOfTopics; i++) {
                    if (i > 0) {
                        sb.append(' ');
                    }
                    sb.append(orgTheta[i]);
                }
                docsThetaWriter.append(sb);
                docsThetaWriter.newLine();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * In-memory variant of the inference: registers each word's theta vector, then
     * averages per document.
     *
     * @param wordsFile      vocabulary, parallel to {@code wordsThetaFile}
     * @param wordsThetaFile per-word theta vectors; {@code wordsThetaFile[i]} belongs to {@code wordsFile.get(i)}
     * @param orgDocsFile    tokenized documents
     * @return one averaged theta vector per document, in input order; a document with no
     *         known words yields an all-zero vector (the original divided by zero and
     *         produced NaN entries)
     */
    public double[][] InferenceTopics(List<String> wordsFile, double[][] wordsThetaFile, List<List<String>> orgDocsFile) {
        int numberOfTopics = wordsThetaFile[0].length;
        for (int i = 0; i < wordsFile.size(); i++) {
            word2theta.put(wordsFile.get(i), wordsThetaFile[i]);
        }

        double[][] result = new double[orgDocsFile.size()][];
        int index = 0;
        for (List<String> doc : orgDocsFile) {
            result[index++] = inferDocTheta(doc, numberOfTopics);
        }
        return result;
    }

    /**
     * Averages the theta vectors of the known tokens of one document. Tokens absent from
     * {@link #word2theta} are skipped (a word in the original text may not occur in the
     * word network). Returns an all-zero vector when no token is known, avoiding the
     * 0/0 = NaN result of the original code.
     */
    private double[] inferDocTheta(Iterable<String> tokens, int numberOfTopics) {
        double[] orgTheta = new double[numberOfTopics];
        int known = 0;
        for (String token : tokens) {
            double[] wordTheta = word2theta.get(token);
            if (wordTheta != null) {
                for (int i = 0; i < numberOfTopics; i++) {
                    orgTheta[i] += wordTheta[i];
                }
                known++;
            }
        }
        if (known > 0) {
            for (int i = 0; i < numberOfTopics; i++) {
                orgTheta[i] /= known;
            }
        }
        return orgTheta;
    }

    /** Opens a buffered reader over {@code path} in the given charset; the caller closes it. */
    private BufferedReader getReader(String path, String charset) throws IOException {
        // Propagating IOException (instead of returning null as the original did)
        // lets callers use try-with-resources and avoids a later NullPointerException.
        return new BufferedReader(new InputStreamReader(new FileInputStream(path), charset));
    }

    /** Opens a buffered writer over {@code path} in the given charset; the caller closes it. */
    private BufferedWriter getWriter(String path, String charset) throws IOException {
        return new BufferedWriter(new OutputStreamWriter(new FileOutputStream(path), charset));
    }
}
