package com.hccl.service.classifier;
import org.ansj.domain.Term;
import org.ansj.splitWord.analysis.ToAnalysis;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.*;

import static com.hccl.config.Constants.*;
/**
 * Created by yang on 2018/8/6.
 */
public class dataprovider {
    // token -> integer id (vocabulary); id 0 doubles as the unknown/padding id
    Map<String, Integer> word2IdMap;
    // part-of-speech tag -> integer id; id 0 doubles as the unknown/padding id
    Map<String, Integer> pos2IdMap;
    // one term list per NON-blank input sentence, in input order
    List<List<Term>> termLists = new ArrayList<>();
    // raw input split on '\n'; may contain blank entries that are skipped everywhere below
    String[] sentences;

    // Strips ASCII/full-width punctuation before segmentation (\pP = Unicode punctuation).
    private static final String PUNCT_REGEX = "[\\pP+~$`^=|<>～｀＄＾＋＝｜＜＞￥×]";

    /**
     * Splits {@code sentencesStr} on newlines, strips punctuation from each non-blank
     * sentence and segments it with ansj, caching the resulting term lists.
     *
     * @param wordMap      token-to-id vocabulary
     * @param posMap       POS-tag-to-id vocabulary
     * @param toAnalysis   ansj segmenter used to tokenize each sentence
     * @param sentencesStr document text, one sentence per line
     */
    public dataprovider(Map<String, Integer> wordMap, Map<String, Integer> posMap, ToAnalysis toAnalysis, String sentencesStr) {
        this.word2IdMap = wordMap;
        this.pos2IdMap = posMap;

        sentences = sentencesStr.split("\n");

        for (String sentence : sentences) {
            if (!sentence.trim().equals("")) {
                try {
                    sentence = sentence.trim().replaceAll(PUNCT_REGEX, "");
                    termLists.add(toAnalysis.parseStr(sentence).getTerms());
                } catch (Exception e) {
                    e.printStackTrace();
                    // BUGFIX: a failed parse must still occupy a slot, otherwise every
                    // later sentence in getData() reads the wrong term list.
                    termLists.add(Collections.emptyList());
                }
            }
        }
    }

    /**
     * Builds the model's input features from the cached term lists.
     * <p>
     * Blank sentences are skipped entirely (their rows stay all-zero, matching the
     * zero padding/unknown id). Each non-blank sentence is truncated to
     * {@code sentlen} tokens (from {@code Constants}); unknown words/tags map to 0.
     *
     * @param modeltype when {@code true}, pad {@code positionData} out to
     *                  {@code sentlen} with running indices (word/pos arrays are
     *                  already zero-initialized by Java)
     * @return feature map with keys {@code data}, {@code posdata},
     *         {@code positiondata}, {@code doclen} and {@code sentlen}
     */
    public Map getData(boolean modeltype) {

        int doculen = sentences.length;
        long[] slen = new long[doculen];
        int[][] wordData = new int[doculen][sentlen];
        int[][] posData = new int[doculen][sentlen];
        int[][] positionData = new int[doculen][sentlen];

        int sentCount = 0;  // index into termLists: counts only non-blank sentences
        for (String sentence : sentences) {
            if (!sentence.trim().equals("")) {
                List<Term> termList = termLists.get(sentCount);
                // effective sentence length, capped at sentlen
                slen[sentCount] = Math.min(termList.size(), sentlen);

                int wordCount = 0;
                for (Term term : termList) {
                    if (wordCount > sentlen - 1)
                        break;
                    // getOrDefault replaces the old catch-NPE idiom and no longer
                    // clobbers a valid word id when only the POS lookup misses.
                    wordData[sentCount][wordCount] = word2IdMap.getOrDefault(term.getName(), 0);
                    posData[sentCount][wordCount] = pos2IdMap.getOrDefault(term.getNatureStr(), 0);
                    positionData[sentCount][wordCount] = wordCount;
                    wordCount++;
                }

                if (modeltype) {
                    // Pad to sentlen: word/pos rows are already zero-filled by the
                    // array allocation, so only the position indices need writing.
                    while (wordCount < sentlen) {
                        positionData[sentCount][wordCount] = wordCount;
                        wordCount++;
                    }
                }

                sentCount++;
            }
        }

        Map<String, Object> predictFeature = new HashMap<>();
        predictFeature.put("data", wordData);
        predictFeature.put("posdata", posData);
        predictFeature.put("positiondata", positionData);
        predictFeature.put("doclen", doculen);
        predictFeature.put("sentlen", slen);

        return predictFeature;
    }

    /**
     * Reads a tab-separated dictionary file ({@code token\tid} per line) into a map.
     *
     * @param dicPath path to the UTF-8 dictionary file
     * @return map from token (column 0) to numeric id (column 1)
     * @throws Exception on I/O failure or a non-numeric id column
     */
    public static Map<String, Integer> readDic(String dicPath) throws Exception {
        Map<String, Integer> map = new HashMap<>();
        // try-with-resources: the old code leaked the reader if parseInt threw
        try (BufferedReader rd = new BufferedReader(
                new InputStreamReader(new FileInputStream(dicPath), StandardCharsets.UTF_8))) {
            String line;
            while ((line = rd.readLine()) != null) {
                String[] items = line.split("\t");
                map.put(items[0], Integer.parseInt(items[1]));
            }
        }
        return map;
    }

    /**
     * Reads a tab-separated mapping file ({@code value\tkey} per line) into a map,
     * keyed by the SECOND column (the reverse of {@link #readDic}).
     *
     * @param dicPath path to the UTF-8 mapping file
     * @return map from column 1 to column 0
     * @throws Exception on I/O failure or a malformed line
     */
    public static Map<String, String> readMap(String dicPath) throws Exception {
        Map<String, String> map = new HashMap<>();
        try (BufferedReader rd = new BufferedReader(
                new InputStreamReader(new FileInputStream(dicPath), StandardCharsets.UTF_8))) {
            String line;
            while ((line = rd.readLine()) != null) {
                String[] items = line.split("\t");
                map.put(items[1], items[0]);
            }
        }
        return map;
    }

}
