package com.sdyc.ndmp.cls.engine;

import com.google.common.io.Files;
import com.sdyc.ndmp.cls.dtd.InputPath;

import java.io.File;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.*;

/**
 * Chinese sentiment analysis using word weights, negation words and degree
 * adverbs: each token of a sentence is classified into one of the three
 * dictionaries and the positions are handed to {@code TextAnalysis#scoreSent}
 * for aggregation.
 * Created by sssd on 2017/7/20.
 */
public class WordScordPredict implements PredictModel {
    /**
     * Text-analysis helper used to load the dictionaries and tokenize input.
     */
    protected TextAnalysis textAnalysis = new TextAnalysis();
    /**
     * Sentiment dictionary: word -> weight (as loaded by readText2Map).
     */
    protected Map<String, Double> senDict;
    /**
     * Negation words, one per line of the deny-word file.
     */
    protected List<String> notList;
    /**
     * Degree-adverb dictionary: word -> intensity value.
     */
    protected Map<String, Double> degreeDict;
    /**
     * Stop words removed during tokenization.
     */
    protected Set<String> stopWords;

    /**
     * Loads the sentiment, negation, degree-adverb and stop-word dictionaries
     * from the files referenced by {@code inputPath}.
     *
     * @param inputPath container holding the dictionary file paths
     * @throws Exception if any dictionary file cannot be read
     */
    public void init(InputPath inputPath) throws Exception {
        String emotionPath = inputPath.getEmotionPath();
        String denyPath = inputPath.getDenyPath();
        String levelPath = inputPath.getLevelPath();
        String stopPath = inputPath.getStopPath();

        senDict = textAnalysis.readText2Map(emotionPath);
        // Guava Files.readLines returns one list element per line of the file.
        notList = Files.readLines(new File(denyPath), StandardCharsets.UTF_8);
        degreeDict = textAnalysis.readText2Map(levelPath);
        stopWords = textAnalysis.readStopWords(stopPath);
    }

    /**
     * Predicts the sentiment of the given sentence.
     *
     * @param sentence sentence to score
     * @return a single-element list containing the aggregated sentiment score
     *         squashed into (-1, 1) via {@link Math#tanh(double)}
     * @throws Exception if tokenization or scoring fails
     */
    public List predict(String sentence) throws Exception {
        // Position -> value maps for each token category; LinkedHashMap keeps
        // sentence order, which scoreSent presumably relies on — TODO confirm.
        Map<Integer, Double> senWord = new LinkedHashMap<Integer, Double>();
        Map<Integer, Double> notWord = new LinkedHashMap<Integer, Double>();
        Map<Integer, Double> degreeWord = new LinkedHashMap<Integer, Double>();
        // Tokenizer joins words with single spaces (stop words removed).
        String[] splitSentence = TextAnalysis.getSplitWord(sentence, stopWords).split(" ");

        /*
            Record the position of every sentiment / negation / degree token.
            Degree adverbs win over negations, which win over sentiment words.
         */
        for (int i = 0; i < splitSentence.length; i++) {
            String word = splitSentence[i];
            if (senDict.containsKey(word) && !notList.contains(word) && !degreeDict.containsKey(word)) {
                senWord.put(i, senDict.get(word));
            } else if (notList.contains(word) && !degreeDict.containsKey(word)) {
                // -1.0 marks a sign flip for the downstream aggregation.
                notWord.put(i, -1.0);
            } else if (degreeDict.containsKey(word)) {
                degreeWord.put(i, degreeDict.get(word));
            }
        }

        /*
            Sentiment aggregation.
         */
        Double score = textAnalysis.scoreSent(senWord, notWord, degreeWord, splitSentence);
        List<Double> preLab = new ArrayList<Double>();
        preLab.add(Math.tanh(score));
        return preLab;
    }
}
