package com.hccl.service.classifier;

import com.hccl.exception.updateException;
import edu.stanford.nlp.classify.ColumnDataClassifier;
import entities.SLUResult;
import entity.ClassifierResult;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;

import static com.hccl.config.Constants.*;

/**
 * Created by sunjia on 2018/1/10.
 * Facade over Stanford NLP's {@link ColumnDataClassifier}: assembles segmented
 * training data from the corpus directory, trains/serializes a max-entropy model,
 * and converts classifier answers ("type@name") into {@code SLUResult}s.
 */

public class MaxEntClassifier {

    ColumnDataClassifier cdc; // underlying Stanford max-ent classifier
    SegmentUtil segmentUtil;  // word segmenter used to preprocess every line

    public MaxEntClassifier() {
        segmentUtil = new SegmentUtil();
        // Paths come from the statically imported Constants.
        cdc = new ColumnDataClassifier(classifierPropPath, classifierModelPath);
    }

    /**
     * Trains the classifier from {@code trainFile} and serializes the model to
     * {@code serializeTo}. Failures are logged and swallowed (best-effort), matching
     * the original contract — callers are not expected to handle training errors.
     *
     * @param trainFile   path to the tab-separated "label\tsegmented-text" file
     * @param serializeTo path the trained model is written to
     */
    public void maxentTrain(String trainFile, String serializeTo) {
        try {
            cdc.trainClassifier(trainFile, serializeTo);
        } catch (IOException e) {
            e.printStackTrace();
            System.out.println("分类器训练失败");
        }
    }

    /**
     * Runs the classifier on one pre-formatted line ("label\tsegmented-text").
     *
     * @return the classifier's results, or an empty list if prediction throws
     *         (never {@code null})
     */
    public List<ClassifierResult> maxentPredict(String line) {
        try {
            return cdc.predict(line); // prediction itself takes only ~5 ms
        } catch (Exception e) {
            e.printStackTrace();
            System.out.println("分类器预测失败");
            return new ArrayList<>(); // empty result when the classifier fails
        }
    }

    /**
     * Assembles all corpus files (minus the excluded commands) into one segmented,
     * tab-separated training file, then trains and serializes the model.
     *
     * @throws FileNotFoundException if the corpus directory is missing or unreadable
     * @throws Exception             if reading/segmenting/writing fails
     */
    public void train() throws Exception {

        // Step 1: build the training TXT. Commands listed here are excluded
        // from training ("command@其他业务" was previously excluded as well).
        HashSet<String> outTrain = new HashSet<>();
        outTrain.add("command@简短词");
        outTrain.add("command@敏感词");
        outTrain.add("command@闲聊类");
        outTrain.add("command@业务咨询");

        File[] files = new File(corpusPath).listFiles();
        if (files == null) {
            // listFiles() returns null (not an empty array) for a missing or
            // non-directory path — fail with a clear message instead of an NPE.
            throw new FileNotFoundException("corpus directory not found or unreadable: " + corpusPath);
        }

        // try-with-resources closes the writer even if segmenting/reading throws.
        try (Writer writer = new OutputStreamWriter(
                new FileOutputStream(classifierTraindataPath, false), StandardCharsets.UTF_8)) {
            for (File f : files) {
                if (!f.isFile() || outTrain.contains(f.getName())) {
                    continue;
                }
                String label = f.getName(); // file name doubles as the class label
                try (BufferedReader reader = new BufferedReader(
                        new InputStreamReader(new FileInputStream(f), StandardCharsets.UTF_8))) {
                    String line;
                    while ((line = reader.readLine()) != null) {
                        writer.write(label + "\t" + segmentUtil.segment(line) + "\n");
                    }
                }
            }
        }

        // Step 2: feed the assembled file to the classifier and serialize the model.
        maxentTrain(classifierTraindataPath, classifierModelPath);
    }

    /**
     * Segments {@code input}, classifies it, and maps each answer (encoded as
     * "type@name") to an {@code SLUResult} carrying the classifier score.
     *
     * @return one result per classifier answer; empty if classification failed
     */
    public ArrayList<SLUResult> predict(String input) throws Exception {

        ArrayList<SLUResult> results = new ArrayList<>();
        String inputSeged = segmentUtil.segment(input);
        // "test" is a placeholder label — the classifier expects "label\ttext" rows.
        List<ClassifierResult> classifierResults = maxentPredict("test" + "\t" + inputSeged);
        for (ClassifierResult c : classifierResults) {
            String[] parts = c.getAnswer().split("@");
            String type = parts[0];
            // Guard against answers without '@' (original code threw AIOOBE here).
            String name = parts.length > 1 ? parts[1] : "";
            results.add(new SLUResult(name, type, c.getScore(), new HashMap<String, String>()));
        }
        return results;
    }

    /**
     * Reloads the classifier from the currently serialized model, then retrains.
     *
     * @throws updateException if retraining fails; the original exception is
     *                         attached as the cause for diagnosability
     */
    public void updateModel() throws updateException {
        this.cdc = new ColumnDataClassifier(classifierPropPath, classifierModelPath);
        try {
            train();
        } catch (Exception e) {
            // updateException's constructor signature is unknown, so attach the
            // cause via initCause() rather than dropping it entirely.
            updateException wrapped = new updateException("更新model出错。系统尝试自动恢复...");
            wrapped.initCause(e);
            throw wrapped;
        }
    }

}
