package wntm;

import java.io.*;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;

/**
 * Infers a topic distribution for each original document as the IDF-weighted
 * average of the topic distributions of the document's words.
 *
 * <p>Inputs: a vocabulary file (one word per line; only the first token of
 * each line is used), a word-theta file aligned line-by-line with the
 * vocabulary, and the original corpus (one whitespace-separated document per
 * line). Output: one space-separated per-document topic distribution per line.
 */
public class InferenceTopicsForOrgDocs_weight {

    // word -> its topic distribution, loaded from the .words/.theta pair.
    private final Map<String, double[]> word2theta = new HashMap<String, double[]>();
    // word -> IDF weight log(docNum / documentFrequency), computed over the corpus.
    private final Map<String, Double> wordWeight = new HashMap<>();

    /**
     * Entry point. Argument order: .words file, word theta file, original
     * text file, output document theta file. When fewer than four arguments
     * are supplied, falls back to the historical hard-coded DBLP paths so
     * existing invocations keep working.
     */
    public static void main(String[] args) {
        String root = "G:\\experiment\\data\\dblp\\";
        String wordsFile = root + "dblp.words";
        String wordsThetaFile = root + "LDA_0.1_0.01_100/0/model-final.theta";
        String orgDocsFile = root + "dblp.data";
        String docsThetaFile = root + "model.theta";
        if (args.length >= 4) {
            wordsFile = args[0];
            wordsThetaFile = args[1];
            orgDocsFile = args[2];
            docsThetaFile = args[3];
        }

        InferenceTopicsForOrgDocs_weight inference = new InferenceTopicsForOrgDocs_weight();
        inference.InferenceTopics(wordsFile, wordsThetaFile, orgDocsFile, docsThetaFile);
    }

    /**
     * Fills {@link #wordWeight} with IDF weights, log(N / df), where N is the
     * number of documents (lines) in the file at {@code path} and df is the
     * number of documents a word occurs in.
     */
    private void get_idf(String path) {
        int docNum = 0;
        try (BufferedReader reader = this.getReader(path, "utf-8")) {
            String line;
            while ((line = reader.readLine()) != null) {
                docNum++;
                // Count each word at most once per document (document frequency).
                HashSet<String> seenInDoc = new HashSet<>();
                for (String word : line.trim().split("\\s+")) {
                    // Skip the empty token produced by splitting a blank line,
                    // and words already counted for this document.
                    if (word.isEmpty() || !seenInDoc.add(word)) {
                        continue;
                    }
                    Double df = wordWeight.get(word);
                    wordWeight.put(word, df == null ? 1.0 : df + 1);
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        // Turn document frequencies into IDF weights in place.
        for (String key : wordWeight.keySet()) {
            wordWeight.put(key, Math.log(docNum / wordWeight.get(key)));
        }
    }

    /**
     * Loads {@link #word2theta}: line i of {@code wordsFile} names the word
     * whose topic distribution is line i of {@code thetaFile}. Exits the JVM
     * on malformed input (line-count mismatch, duplicate word, ragged rows).
     *
     * @return the number of topics, i.e. the column count of the theta file
     */
    private int loadWordsAndTheta(String wordsFile, String thetaFile) {
        int numberOfTopics = -1;
        try (BufferedReader wordsReader = this.getReader(wordsFile, "utf-8");
             BufferedReader thetaReader = this.getReader(thetaFile, "utf-8")) {
            String wordsLine = wordsReader.readLine();
            String thetaLine = thetaReader.readLine();
            numberOfTopics = thetaLine.trim().split("\\s+").length;
            while (wordsLine != null) {
                if (thetaLine == null) {
                    System.err.println("# of lines in .words file does not match the # in .theta file!");
                    System.exit(0);
                }
                // Only the first token of the .words line is the word itself.
                String word = wordsLine.split("\\s+")[0].trim();
                if (word2theta.containsKey(word)) {
                    System.err.println("Duplicate word exists in .words file!");
                    System.exit(0);
                }

                String[] thetaValsInStr = thetaLine.trim().split("\\s+");
                if (thetaValsInStr.length != numberOfTopics) {
                    System.err.println("Wrong number of topics in .theta file!");
                    System.exit(0);
                }

                double[] theta = new double[numberOfTopics];
                for (int i = 0; i < numberOfTopics; i++) {
                    theta[i] = Double.parseDouble(thetaValsInStr[i]);
                }
                word2theta.put(word, theta);

                wordsLine = wordsReader.readLine();
                thetaLine = thetaReader.readLine();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }

        return numberOfTopics;
    }

    /**
     * Infers and writes a topic distribution for every original document: the
     * IDF-weighted sum of the word distributions, divided by the number of
     * known words in the document.
     *
     * @param wordsFile      vocabulary file, one word per line
     * @param wordsThetaFile per-word topic distributions aligned with wordsFile
     * @param orgDocsFile    original documents, one per line
     * @param docsThetaFile  output path for per-document distributions
     */
    public void InferenceTopics(String wordsFile, String wordsThetaFile, String orgDocsFile, String docsThetaFile) {
        int numberOfTopics = this.loadWordsAndTheta(wordsFile, wordsThetaFile);
        get_idf(orgDocsFile);

        try (BufferedReader orgDocsReader = this.getReader(orgDocsFile, "utf-8");
             BufferedWriter docsThetaWriter = this.getWriter(docsThetaFile, "utf-8")) {
            String orgDoc;
            while ((orgDoc = orgDocsReader.readLine()) != null) {
                double[] orgTheta = new double[numberOfTopics];
                int knownWords = 0;
                for (String token : orgDoc.trim().split("\\s+")) {
                    // Some words of the original text do not occur in the word network.
                    double[] wordTheta = word2theta.get(token);
                    if (wordTheta != null) {
                        double weight = wordWeight.get(token);
                        for (int i = 0; i < numberOfTopics; i++) {
                            orgTheta[i] += wordTheta[i] * weight;
                        }
                        knownWords++;
                    }
                }
                StringBuilder sb = new StringBuilder();
                for (int i = 0; i < numberOfTopics; i++) {
                    // Guard 0/0 -> NaN for documents with no known words:
                    // emit an all-zero distribution instead.
                    if (knownWords > 0) {
                        orgTheta[i] /= knownWords;
                    }
                    if (i > 0) {
                        sb.append(' ');
                    }
                    sb.append(orgTheta[i]);
                }
                docsThetaWriter.append(sb.toString());
                docsThetaWriter.newLine();
            }
            docsThetaWriter.flush();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Opens a buffered character reader over {@code path} in the given charset. */
    private BufferedReader getReader(String path, String charset) throws IOException {
        return new BufferedReader(new InputStreamReader(
                new FileInputStream(path), charset));
    }

    /** Opens a buffered character writer to {@code path} in the given charset. */
    private BufferedWriter getWriter(String path, String charset) throws IOException {
        return new BufferedWriter(new OutputStreamWriter(
                new FileOutputStream(path), charset));
    }
}
