package com.seanLab.tool.TagSuggestModel.TopicModel;

import com.seanLab.tool.TagSuggestModel.StaticModelProperties;

import org.ansj.domain.Result;
import org.ansj.domain.Term;
import org.ansj.library.DicLibrary;
import org.ansj.recognition.impl.StopRecognition;
import org.ansj.splitWord.analysis.DicAnalysis;
import org.ansj.splitWord.analysis.NlpAnalysis;
import org.ansj.util.MyStaticValue;
//import org.apache.hadoop.conf.Configuration;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.logging.Logger;

/**
 * LDA vocabulary model.
 *
 * <p>Loads a "word count"-per-line vocabulary file into an insertion-ordered
 * word -&gt; dense-id map, and converts raw text documents into bag-of-words
 * count vectors or word-id sequences over that vocabulary.
 */
public class Vocabulary implements Serializable {

    private static final long serialVersionUID = 1L;

    private static final Logger LOG = Logger.getLogger(Vocabulary.class.getName());

    /** Insertion-ordered word -> id map; ids are dense in [0, size). */
    private Map<String, Integer> map;
    /** Number of distinct words loaded (always equals map.size() after load). */
    private int size;

    /**
     * Builds the vocabulary by loading it from disk.
     *
     * @param vocabularyPath path to the UTF-8 vocabulary file, one "word count" entry per line
     * @param vocSize        maximum number of words to load
     * @throws IOException if the file cannot be read
     */
    public Vocabulary(String vocabularyPath, int vocSize) throws IOException {
        load(vocabularyPath, vocSize);
    }

    /**
     * (Re)loads the vocabulary from {@code vocabularyPath}, keeping at most
     * {@code vocSize} words. Each line is expected to be "word count": the text
     * before the last space is the word, the trailing count is ignored.
     * Duplicate and malformed lines are skipped with a warning.
     *
     * @param vocabularyPath path to the UTF-8 vocabulary file
     * @param vocSize        maximum number of words to keep
     * @throws IOException if the file cannot be read
     */
    public void load(String vocabularyPath, int vocSize) throws IOException {
        map = new LinkedHashMap<>();
        // try-with-resources guarantees the reader is closed even if readLine() throws
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(new FileInputStream(vocabularyPath), StandardCharsets.UTF_8))) {
            String line;
            int id = 0;
            while ((line = reader.readLine()) != null) {
                int sep = line.lastIndexOf(' ');
                if (sep < 0) {
                    // no "word count" separator — skip instead of throwing StringIndexOutOfBounds
                    LOG.warning("malformed vocabulary line: " + line);
                    continue;
                }
                String key = line.substring(0, sep);
                if (map.containsKey(key)) {
                    // earlier model files were not de-duplicated; keep the first occurrence
                    LOG.warning("repeat word: " + key);
                    continue;
                }
                map.put(key, id);
                id++;
                if (id >= vocSize) {
                    break;
                }
            }
        }
        size = map.size();
        LOG.info("vocabulary size: " + size);
    }

    /**
     * Returns the dense id of {@code word}, or {@code null} if it is not in the
     * vocabulary.
     */
    public Integer getLocation(String word) {
        return map.get(word);
    }

    /** Returns whether {@code word} is part of the vocabulary. */
    public boolean containWord(String word) {
        return map.containsKey(word);
    }

    /** Returns the number of words in the vocabulary. */
    public int getSize() {
        return size;
    }

    /**
     * Returns the vocabulary as an array where index {@code i} holds the word
     * whose id is {@code i}.
     */
    public String[] toArray() {
        String[] array = new String[map.size()];
        // use the class logger instead of System.out for consistent logging
        LOG.info("vocabulary size: " + map.size() + " " + array.length);
        for (Map.Entry<String, Integer> entry : map.entrySet()) {
            array[entry.getValue()] = entry.getKey();
        }
        return array;
    }

    /**
     * Segments {@code content} with dictionary-based analysis plus the shared
     * stop-word filter and returns a bag-of-words count vector of length
     * {@link #getSize()}: slot {@code i} is the number of occurrences of word
     * {@code i}. Out-of-vocabulary terms are ignored.
     */
    public int[] transDoc(String content) {
        DicAnalysis analysis = new DicAnalysis();
        Result result = analysis.parseStr(content).recognition(StaticModelProperties.DefaultFilter);
        int[] counts = new int[size];
        for (Term term : result.getTerms()) {
            // single lookup instead of containWord() followed by getLocation()
            Integer id = getLocation(term.getName());
            if (id != null) {
                counts[id] += 1;
            }
        }
        return counts;
    }

    /**
     * Segments {@code content} like {@link #transDoc(String)} but returns the
     * sequence of word ids in occurrence order, preserving duplicates.
     * Out-of-vocabulary terms are dropped.
     */
    public int[] transDoc2List(String content) {
        DicAnalysis analysis = new DicAnalysis();
        Result result = analysis.parseStr(content).recognition(StaticModelProperties.DefaultFilter);
        ArrayList<Integer> ids = new ArrayList<>();
        for (Term term : result.getTerms()) {
            // single lookup instead of containWord() followed by getLocation()
            Integer id = getLocation(term.getName());
            if (id != null) {
                ids.add(id);
            }
        }
        int[] a = new int[ids.size()];
        for (int i = 0; i < ids.size(); i++) {
            a[i] = ids.get(i);
        }
        return a;
    }
}
