package com.match;

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import com.match.model.DiscussModel;
import com.match.model.KeyGradeModel;
import com.match.model.ResultInfo;
import com.match.utils.GetKeyWordToFile;
import com.match.utils.POIUtils;
import com.match.utils.TxtUtils;
import com.sun.jna.Library;
import com.sun.jna.Native;

public class TestSemanticAnalysis {

    /**
     * JNA binding for the NLPIR (ICTCLAS) native word-segmentation / keyword
     * extraction library. {@code Instance} eagerly loads the DLL from a
     * hard-coded path when this interface is first referenced.
     */
    public interface CLibrary extends Library {
        CLibrary Instance = (CLibrary) Native
                .loadLibrary("D:\\workspace\\match\\source\\win64\\NLPIR", CLibrary.class);

        int NLPIR_Init(String sDataPath, int encoding, String sLicenceCode);

        String NLPIR_ParagraphProcess(String sSrc, int bPOSTagged);

        String NLPIR_GetKeyWords(String sLine, int nMaxKeyLimit, boolean bWeightOut);

        String NLPIR_GetFileKeyWords(String sLine, int nMaxKeyLimit, boolean bWeightOut);

        int NLPIR_AddUserWord(String sWord); // add by qp 2008.11.10

        int NLPIR_DelUsrWord(String sWord); // add by qp 2008.11.10

        String NLPIR_GetLastErrorMsg();

        void NLPIR_Exit();
    }

    /** Number of classification buckets (dictionaries type1.txt .. type5.txt). */
    private static final int TYPE_COUNT = 5;

    static {
        // Initialise NLPIR with its data directory; encoding 1 selects UTF-8
        // (0 would be GBK). Licence code "0" means the bundled evaluation licence.
        String argu = "D:\\workspace\\match\\source";
        int charset_type = 1;

        int init_flag = CLibrary.Instance.NLPIR_Init(argu, charset_type, "0");
        if (0 == init_flag) {
            // NOTE(review): failure is only logged; later native calls will
            // most likely fail too — TODO consider failing fast here.
            System.err.println("初始化失败！fail reason is "
                    + CLibrary.Instance.NLPIR_GetLastErrorMsg());
        }
    }

    /**
     * Reads the prediction spreadsheet, extracts keywords for every row via
     * NLPIR, scores the keywords against the five per-type dictionaries and
     * writes the winning type of each row to {@code F:\keyword\result.txt}.
     * All failures are logged and swallowed (best-effort batch job).
     */
    public static void main(String[] args) throws Exception {
        try {
            List<String> end = new ArrayList<>();
            List<DiscussModel> mapDiscussPredict = POIUtils
                    .readExcelWithTitle("F:\\predict_first.xls", false);

            for (int k = 0; k < mapDiscussPredict.size(); k++) {
                DiscussModel row = mapDiscussPredict.get(k);
                String nativeByte = CLibrary.Instance
                        .NLPIR_GetKeyWords(row.getDiscuss(), 10, true);

                System.out.println("关键词提取结果是：" + row.getId() + "_"
                        + row.getDiscuss() + "_" + nativeByte);

                List<KeyGradeModel> keyList = parseKeyWords(nativeByte);

                // Score the keyword list against each type dictionary. This
                // replaces five copy-pasted call sites with one loop.
                List<ResultInfo> resultType = new ArrayList<>();
                for (int t = 1; t <= TYPE_COUNT; t++) {
                    List<Object> scored = TxtUtils.getKeyListProbabilityOfTypeTxt(keyList,
                            "F:\\keyword\\type" + t + ".txt");
                    resultPutToList(String.valueOf(t), scored, resultType);
                }

                // Record this row's id so results can be joined back later.
                List<String> predictId = new ArrayList<>(Arrays.asList(row.getId()));
                TxtUtils.writeResultToFile(predictId, "F:\\keyword\\predict_id.txt");

                // Rank candidates: descending score, ties broken by descending
                // keyword-match count; the best candidate decides the type.
                Collections.sort(resultType, new Comparator<ResultInfo>() {
                    @Override
                    public int compare(ResultInfo o1, ResultInfo o2) {
                        int byScore = o2.getScore().compareTo(o1.getScore());
                        return byScore != 0 ? byScore : o2.getNumber().compareTo(o1.getNumber());
                    }
                });
                end.add(resultType.get(0).getType());
            }
            TxtUtils.writeResultToFile(end, "F:\\keyword\\result.txt");

            CLibrary.Instance.NLPIR_Exit();

        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }

    /**
     * Parses NLPIR's raw keyword string into {@link KeyGradeModel}s.
     * <p>
     * The native output is "#"-separated entries of "/"-separated fields;
     * field 0 is the keyword and field 2 its weight (field 1 — presumably the
     * part-of-speech tag — is discarded, matching the original behaviour).
     * Entries with fewer than three fields are skipped.
     *
     * @param nativeByte raw result of {@code NLPIR_GetKeyWords}
     * @return one model per well-formed keyword entry
     */
    private static List<KeyGradeModel> parseKeyWords(String nativeByte) {
        List<KeyGradeModel> keyList = new ArrayList<>();
        for (String entry : nativeByte.split("#")) {
            String[] keyValue = entry.split("/");
            if (keyValue.length < 3) {
                continue; // malformed / empty entry
            }
            KeyGradeModel keyGradeModel = new KeyGradeModel();
            keyGradeModel.setKeyName(keyValue[0]);
            keyGradeModel.setKeyWeight(new BigDecimal(keyValue[2]));
            keyList.add(keyGradeModel);
        }
        return keyList;
    }

    /**
     * Extracts the keyword set of each score class ("1.0".."5.0") from the
     * training rows. Rows whose score is outside that range are ignored, and
     * per-row failures are logged and skipped.
     *
     * @param mapList training rows (id, discuss text, score)
     * @return map of "type1".."type5" to the keywords seen for that score
     */
    private static Map<String, Set<String>> getTypeKey(List<DiscussModel> mapList) {
        // One bucket per type instead of five parallel local variables.
        Map<String, Set<String>> result = new HashMap<>();
        for (int t = 1; t <= TYPE_COUNT; t++) {
            result.put("type" + t, new HashSet<String>());
        }

        for (int i = 0; i < mapList.size(); i++) {
            try {
                DiscussModel row = mapList.get(i);
                System.out.println(row.getId() + "_" + row.getDiscuss() + "_" + row.getScore());
                String nativeByte = CLibrary.Instance.NLPIR_GetKeyWords(row.getDiscuss(),
                        10, false);

                System.out.println("关键词提取结果是：" + nativeByte);
                String score = row.getScore().toString();
                for (int t = 1; t <= TYPE_COUNT; t++) {
                    if ((t + ".0").equals(score)) {
                        Collections.addAll(result.get("type" + t), nativeByte.split("#"));
                        break;
                    }
                }
            } catch (Exception e2) {
                // Best effort: one bad row must not abort the whole extraction.
                e2.printStackTrace();
            }
        }
        return result;
    }

    /**
     * Wraps one type's (match count, score) pair into a {@link ResultInfo}
     * and appends it to the accumulator.
     *
     * @param type       type label, "1".."5"
     * @param scoreType  element 0 = match count (Integer), element 1 = score (BigDecimal)
     * @param resultType accumulator list the new entry is appended to
     */
    private static void resultPutToList(String type, List<Object> scoreType,
            List<ResultInfo> resultType) {
        ResultInfo resultInfo = new ResultInfo();
        resultInfo.setScore((BigDecimal) scoreType.get(1));
        resultInfo.setNumber((Integer) scoreType.get(0));
        resultInfo.setType(type);
        resultType.add(resultInfo);
    }
}
