package com.huiquan.zs.service;

import com.huiquan.analysis.constants.AnalysisDictionaryConstant;
import com.huiquan.analysis.constants.AnalysisSentenceConstant;
import com.huiquan.analysis.constants.RedisConstant;
import com.huiquan.analysis.dao.AnalysisDictionaryDao;
import com.huiquan.analysis.dao.AnalysisSentenceDao;
import com.huiquan.analysis.dao.AnalysisVocabularyDao;
import com.huiquan.analysis.domain.*;
import com.huiquan.analysis.service.SimilarityAlgo;
import com.huiquan.elasticsearch.service.ElasticsearchService;
import com.huiquan.foundation.util.BusinessUtil;
import com.huiquan.framework.base.BaseService;
import com.huiquan.framework.base.ReturnCode;
import com.huiquan.framework.base.ReturnData;
import com.huiquan.framework.utils.DateUtils;
import com.huiquan.framework.utils.GetListUtil;
import com.huiquan.framework.utils.ReturnUtil;
import com.huiquan.sphinx.SphinxClient;
import com.huiquan.sphinx.SphinxException;
import com.huiquan.sphinx.SphinxMatch;
import com.huiquan.sphinx.SphinxResult;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.core.ValueOperations;
import org.springframework.stereotype.Service;

import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.util.*;

@Service
public class ZsSentenceService extends BaseService {

    @Autowired
    private AnalysisSentenceDao analysisSentenceDao; // sentence CRUD, both active and deleted tables
    @Autowired
    private AnalysisVocabularyDao analysisVocabularyDao; // word-segment CRUD per sentence id (sid)

    @Autowired
    private AnalysisDictionaryDao analysisDictionDao; // user / ambiguity / outside dictionary access

    @Autowired
    private RedisTemplate<String, List<AnalysisSentence>> redisTemplate; // cached sentence result lists keyed by search pattern

    @Autowired
    private RedisTemplate<String, Object> redisStrTemplate; // scalar values, e.g. the comma-joined deleted-id filter

    @Value("${sphinx.host}")
    private String sphinxHost; // Sphinx daemon host; NOTE(review): not referenced in this chunk — verify usage elsewhere

    @Autowired
    private ElasticsearchService elasticsearchService; // ES index maintenance for ZS (chief-complaint) sentences


    /**
     * Assembles the view model for the chief-complaint (ZS) labeling page:
     * the sentence text, its word segments, and the selectable
     * attribute/property/position option lists.
     *
     * @param idStr sentence id as string
     * @return map of everything the edit page renders
     */
    public Map<String, Object> zsPreEdit(String idStr) {
        Long id = Long.parseLong(idStr);
        AnalysisSentence analysisSentence = analysisSentenceDao.retrieveObjectById(id);
        List<AnalysisVocabulary> vocabularys = analysisVocabularyDao.retriveListBySid(id);

        // Start from the configured option maps, then make sure every value a
        // segment already carries is present as a selectable option too.
        Map<String, String> characterMap = new LinkedHashMap<>(
                AnalysisSentenceConstant.getPropertyMapByType(AnalysisSentenceConstant.CHARACTER_LABEL));
        Map<String, String> propertyMap = new LinkedHashMap<>(
                AnalysisSentenceConstant.getPropertyMapByTypeAndSecondType(AnalysisSentenceConstant.TYPE_ZS, ""));
        for (AnalysisVocabulary segment : vocabularys) {
            if (!characterMap.containsKey(segment.getCharacteristic())) {
                characterMap.put(segment.getCharacteristic(), segment.getCharacteristic());
            }
            if (!propertyMap.containsKey(segment.getProperty())) {
                propertyMap.put(segment.getProperty(), segment.getProperty());
            }
        }

        Map<String, Object> model = new HashMap<>();
        model.put("id", id);
        model.put("sentence", analysisSentence.getSentence());
        model.put("vocabularys", vocabularys);
        model.put("characterMap", characterMap);
        model.put("propertyMap", propertyMap);
        model.put("position1List", AnalysisSentenceConstant.getPositionByType(AnalysisSentenceConstant.POSITION1_LABEL));
        model.put("position2List", AnalysisSentenceConstant.getPositionByType(AnalysisSentenceConstant.POSITION2_LABEL));
        return model;
    }

    /**
     * Pages through unlabeled chief-complaint sentences fetched from
     * Elasticsearch and decorates the result with the option lists the
     * search form needs. Paging is done in memory over the full ES result.
     *
     * @param startStr     1-based page number as string; null means page 1
     * @param searchParams word-level search conditions forwarded to ES
     * @return paged view-model map
     */
    public Map<String, Object> sentenceUnlabelList(String startStr, List<AnalysisSearchParam> searchParams) {
        int start = startStr == null ? 1 : Integer.parseInt(startStr);
        // Full matching list from ES; sliced locally below.
        List<AnalysisSentence> list = elasticsearchService.getZSUnlabeledListFromES(searchParams);
        int totalSize = list.size();

        // Derive paging numbers (clamped page, absolute start index, last page).
        Map<String, Integer> pageNo = GetListUtil.getPageNoMap(totalSize, start);
        int endPage = pageNo.get("endPage");
        start = pageNo.get("start");
        int startIndex = pageNo.get("startIndex");

        if (startIndex < totalSize) {
            // Page size is 20; Math.min replaces the manual ternary clamp.
            list = list.subList(startIndex, Math.min(startIndex + 20, totalSize));
        }

        Map<String, Object> map = GetListUtil.getReturnMap2(totalSize, start, startIndex, endPage, list, null);
        LinkedHashMap<String, String> propertyMap = new LinkedHashMap<>();
        // Blank first entry renders as the "no selection" option.
        propertyMap.put(" ", " ");
        propertyMap.putAll(AnalysisSentenceConstant.getPropertyMapByTypeAndSecondType(AnalysisSentenceConstant.TYPE_ZS, null));
        map.put("propertyMap", propertyMap);
        map.put("position1List", AnalysisSentenceConstant.getPositionByType(AnalysisSentenceConstant.POSITION1_LABEL));
        map.put("position2List", AnalysisSentenceConstant.getPositionByType(AnalysisSentenceConstant.POSITION2_LABEL));
        return map;
    }

    /**
     * Pages through already-labeled chief-complaint sentences fetched from
     * Elasticsearch and decorates the result with the option lists the
     * search form needs. Paging is done in memory over the full ES result.
     *
     * @param startStr     1-based page number as string; null means page 1
     * @param searchParams word-level search conditions forwarded to ES
     * @return paged view-model map
     */
    public Map<String, Object> sentenceLabeledList(String startStr, List<AnalysisSearchParam> searchParams) {
        int start = startStr == null ? 1 : Integer.parseInt(startStr);
        // Full matching list from ES; sliced locally below.
        List<AnalysisSentence> list = elasticsearchService.getZSLabeledListFromES(searchParams);
        int totalSize = list.size();

        // Derive paging numbers (clamped page, absolute start index, last page).
        Map<String, Integer> pageNo = GetListUtil.getPageNoMap(totalSize, start);
        int endPage = pageNo.get("endPage");
        start = pageNo.get("start");
        int startIndex = pageNo.get("startIndex");

        if (startIndex < totalSize) {
            // Page size is 20; Math.min replaces the manual ternary clamp.
            list = list.subList(startIndex, Math.min(startIndex + 20, totalSize));
        }

        Map<String, Object> map = GetListUtil.getReturnMap2(totalSize, start, startIndex, endPage, list, null);
        LinkedHashMap<String, String> propertyMap = new LinkedHashMap<>();
        // Blank first entry renders as the "no selection" option.
        propertyMap.put(" ", " ");
        propertyMap.putAll(AnalysisSentenceConstant.getPropertyMapByTypeAndSecondType(AnalysisSentenceConstant.TYPE_ZS, null));
        map.put("propertyMap", propertyMap);
        map.put("position1List", AnalysisSentenceConstant.getPositionByType(AnalysisSentenceConstant.POSITION1_LABEL));
        map.put("position2List", AnalysisSentenceConstant.getPositionByType(AnalysisSentenceConstant.POSITION2_LABEL));
        return map;
    }

    /**
     * Deletes a chief-complaint (ZS) sentence by delegating to the generic
     * {@link #delete(String, String, String)} with the ZS type.
     *
     * @param idStr sentence id as string
     * @return success result from the generic delete
     */
    public ReturnData deleteZs(String idStr) {
        return delete(String.valueOf(AnalysisSentenceConstant.TYPE_ZS), null, idStr);
    }

    /**
     * Deletes a sentence of the given type: moves it from the active analysis
     * tables into the deleted table, evicts it from the matching cached Redis
     * result lists, and appends its id to a comma-delimited "deleted ids"
     * Redis value used to filter search-engine hits at display time (updating
     * the search index itself is considered too expensive here).
     *
     * @param typeStr    sentence type (ZS / XBS / CT constant) as string
     * @param secondType sub-type; only used in the key pattern for XBS sentences
     * @param idStr      sentence id as string
     * @return success result
     */
    public ReturnData delete(String typeStr, String secondType, String idStr) {
        Long id = Long.parseLong(idStr);
        int type = Integer.parseInt(typeStr);

        // Move the sentence from the analysis tables into the deleted table.
        removeAnalysisToDel(id);

        AnalysisSentence analysisSentence = analysisSentenceDao.retrieveDelObjectById(id);

        LOGGER.info("更新redis开始");
        // Pick the cached-list key pattern matching this sentence's type and
        // label status (1 = labeled, 0 = unlabeled). XBS keys embed the
        // secondType between \u0001 separators.
        String queryStr = "";
        if (type == AnalysisSentenceConstant.TYPE_CT && analysisSentence.getLabelStatus() == 1) {
            queryStr = RedisConstant.KEY_PATTERN_MEDICAL_LABELED;

        } else if (type == AnalysisSentenceConstant.TYPE_XBS && analysisSentence.getLabelStatus() == 1) {
            queryStr = RedisConstant.KEY_PATTERN_HPI_LABELED_NO_SECOND_TYPE + (char) 1 + secondType + (char) 1 + "*";
        } else if (type == AnalysisSentenceConstant.TYPE_ZS && analysisSentence.getLabelStatus() == 1) {
            queryStr = RedisConstant.KEY_PATTERN_COMPLAIN_LABELED;

        } else if (type == AnalysisSentenceConstant.TYPE_CT && analysisSentence.getLabelStatus() == 0) {
            queryStr = RedisConstant.KEY_PATTERN_MEDICAL_UNLABELED;

        } else if (type == AnalysisSentenceConstant.TYPE_XBS && analysisSentence.getLabelStatus() == 0) {
            queryStr = RedisConstant.KEY_PATTERN_HPI_UNLABELED_NO_SECOND_TYPE + (char) 1 + secondType + (char) 1 + "*";
        } else if (type == AnalysisSentenceConstant.TYPE_ZS && analysisSentence.getLabelStatus() == 0) {
            queryStr = RedisConstant.KEY_PATTERN_COMPLAIN_UNLABELED;
        }
        removeAnalySentenceceOfRedis(id, queryStr);
        // The search index is not touched here; the id is instead recorded in a
        // filter key and deleted rows are hidden when results are displayed.
        ValueOperations<String, Object> valueops = redisStrTemplate.opsForValue();
        Object deleteId = valueops.get(RedisConstant.KEY_ANALYSIS_DELETE);
        if (deleteId != null) {
            // Existing value already ends with ','; append "id,".
            valueops.set(RedisConstant.KEY_ANALYSIS_DELETE, deleteId + idStr + ",");
        } else {
            // First entry: surround with commas so ids can be matched as ",id,".
            valueops.set(RedisConstant.KEY_ANALYSIS_DELETE, "," + idStr + ",");
        }
        LOGGER.info("更新redis完成");
        return ReturnUtil.success();
    }

    /**
     * Pages through soft-deleted chief-complaint (ZS) sentences, optionally
     * filtered by sentence text. An "exact" search wraps the term in tabs
     * (matching a whole segment); otherwise list separators in the input are
     * turned into SQL wildcards for a fuzzy match.
     *
     * @param startStr        1-based page number as string; null means page 1
     * @param sentence        optional text filter
     * @param exactSearchFlag "1" for exact segment match, anything else fuzzy
     * @return paged view-model map
     */
    public Map<String, Object> zsDeleteList(String startStr, String sentence, String exactSearchFlag) {
        int start = (startStr == null) ? 1 : Integer.parseInt(startStr);

        Map<String, Object> param = new HashMap<>();
        if (sentence != null && !sentence.isEmpty()) {
            if ("1".equals(exactSearchFlag)) {
                // Segments are tab-delimited in the stored text.
                param.put("sentence", "%\t" + sentence + "\t%");
            } else {
                String fuzzy = sentence.replace(";", "%").replace("；", "%").replace(",", "%").replace("，", "%");
                param.put("sentence", "%" + fuzzy + "%");
            }
        }
        param.put("type", AnalysisSentenceConstant.TYPE_ZS);

        int totalSize = analysisSentenceDao.retrieveDelSize(param);
        Map<String, Integer> pageNo = GetListUtil.getPageNoMap(totalSize, start);
        int endPage = pageNo.get("endPage");
        start = pageNo.get("start");
        int startIndex = pageNo.get("startIndex");

        List<AnalysisSentence> list = new ArrayList<>();
        if (startIndex < totalSize) {
            param.put("startIndex", startIndex);
            list = analysisSentenceDao.retrieveDelList(param);
        }

        Map<String, String> showSearch = new LinkedHashMap<>();
        showSearch.put("sentence", "句子");
        Map<String, String> searchMap = new HashMap<>();
        searchMap.put("sentence", sentence);

        Map<String, Object> map = GetListUtil.getReturnMap2(totalSize, start, startIndex, endPage, list, searchMap);
        map.put("showSearch", showSearch);
        map.put("showSearchValue", searchMap);
        return map;
    }

    /**
     * Applies a word-segmentation edit to a sentence: validates that the
     * submitted tokens re-assemble the sentence, maintains the dictionaries
     * based on the before/after diff, then persists the new segments.
     *
     * @param typeStr        sentence type as string
     * @param secondType     sub-type for dictionary-type resolution
     * @param idStr          sentence id as string
     * @param user           editor
     * @param vocabularyStrs edited tokens, in order
     * @param characterStrs  per-token part-of-speech values
     * @param propertyStrs   per-token properties
     * @param position1Strs  per-token primary positions
     * @param position2Strs  per-token secondary positions
     * @param req            request; used to park a dictionary-conflict payload
     * @param sourceStrs     NOTE(review): unused in this method — verify callers
     * @return success, validation failure, or a dictionary-conflict result
     * @throws Exception propagated from persistence/indexing
     */
    public ReturnData edit(String typeStr, String secondType, String idStr, User user, String[] vocabularyStrs,
                           String[] characterStrs, String[] propertyStrs, String[] position1Strs, String[] position2Strs,
                           HttpServletRequest req, String[] sourceStrs) throws Exception {
        Long id = Long.parseLong(idStr);
        int type = Integer.parseInt(typeStr);

        AnalysisSentence sentence = analysisSentenceDao.retrieveObjectById(id);

        // Reject the edit when the tokens do not re-assemble the sentence text.
        if (!checkVocabulary(vocabularyStrs, sentence.getSentence())) {
            return ReturnUtil.fail(ReturnCode.ANALYSIS_VOCABULARYS_CHECK_ERROR);
        }

        // Build the new segment entities and load the previous segmentation.
        List<AnalysisVocabulary> newVocabularys = getVocabularysByParam(vocabularyStrs, characterStrs, propertyStrs,
                position1Strs, position2Strs, id, type);
        List<AnalysisVocabulary> oldVocabularys = analysisVocabularyDao.retriveListBySid(id);

        // Maintain the dictionaries from the diff; bail out on conflicts so the
        // caller can ask the user for confirmation.
        ReturnData dictResult = updateDictionary(newVocabularys, oldVocabularys, sentence, user, req, secondType);
        if (!dictResult.getReturnCode().getCode().equals("0")) {
            return dictResult;
        }

        updateSentenceAndVocabularys(sentence, newVocabularys, user);
        return ReturnUtil.success();
    }

    /**
     * Persists an edited sentence: replaces its word segments, marks the
     * sentence as labeled, and rebuilds its Elasticsearch document.
     *
     * <p>Order matters: the old ES document is deleted first, the database is
     * updated, and only after a short delay is the new document indexed.
     *
     * @param analysisSentence sentence being updated (mutated: label status, modifier)
     * @param vocabularys      new word segments to store
     * @param user             editor, recorded as modifier
     * @throws Exception propagated from the sleep or the DAO/ES calls
     */
    private void updateSentenceAndVocabularys(AnalysisSentence analysisSentence, List<AnalysisVocabulary> vocabularys,
                                              User user) throws Exception {


        Long sid = analysisSentence.getId();

        // Delete the old ES document first.
        elasticsearchService.deleteZSContentFromESBySid(sid);

        LOGGER.info("开始删除历史词语");
        // Delete the old word segments from the database.
        analysisVocabularyDao.deleteVocabulaysBySid(sid);
        LOGGER.info("逻辑删除搜索引擎中的历史词语");
        LOGGER.info("新增修改后词语");
        // Insert the edited segments.
        analysisVocabularyDao.batchInsert(vocabularys);
        LOGGER.info("修改句子");
        // Update the sentence: mark as labeled and record the editor.
        analysisSentence.setLabelStatus(AnalysisSentenceConstant.LABEL_STATUS_LABELED);
        analysisSentence.setModifierId(user.getUserId());
        analysisSentenceDao.update(analysisSentence);
        LOGGER.info("更新分词和句子完成");
        // Sleep 1s because the ES delete above takes effect asynchronously;
        // indexing the new document too early could race with the delete.
        Thread.sleep(1000);
        List<AnalysisSentenceVocabularyES> ESList = analysisSentenceDao.retrieveZsVocabularyListBySid(sid);
        // Index the fresh document.
        elasticsearchService.addZSContentToES(ESList);
    }


    /**
     * Compares the segmentation before and after an edit and maintains the
     * dictionaries accordingly.
     *
     * <p>Two cursors walk the old and new token lists in parallel. When the
     * token texts diverge, the shorter accumulated string is extended with the
     * next token on its side until both spellings match again. If only the
     * new side had to grow, several new tokens span one old token: a split
     * ({@code mergeFlag} becomes false). Otherwise old tokens were combined
     * into one new token: a merge. Merges feed the user (or time) dictionary,
     * splits feed the ambiguity dictionary; a conflict with an existing entry
     * is parked in the session and returned for user confirmation.
     *
     * @param vocabularys    segmentation after the edit
     * @param rawVocabularys segmentation before the edit
     * @param sentence       the sentence being edited
     * @param user           editor, recorded on new dictionary rows
     * @param req            request; conflict payload is stored in its session as "objMap"
     * @param secondType     sub-type used to resolve dictionary types for non-ZS sentences
     * @return success, or a DYNAMIC_DESC_EXCEPTION result describing a conflict
     */
    private ReturnData updateDictionary(List<AnalysisVocabulary> vocabularys, List<AnalysisVocabulary> rawVocabularys,
                                        AnalysisSentence sentence, User user, HttpServletRequest req, String secondType) {
        LOGGER.info("开始维护词典，sentence=" + sentence.getSentence());
        Map<String, Object> searchParam = null;
        int newIndex = 0, rawIndex = 0, index = 0, count = 0;
        String newStr = "", rawStr = "";
        boolean mergeFlag = true;
        String chara = null;
        // One cursor per list; compare token by token from the start.
        while (newIndex < vocabularys.size() && rawIndex < rawVocabularys.size()) {
            AnalysisVocabulary rawVocabulary = rawVocabularys.get(rawIndex);
            AnalysisVocabulary newVocabulary = vocabularys.get(newIndex);

            if (!rawVocabulary.getVocabulary().equals(newVocabulary.getVocabulary())) {
                // Tokens diverge: grow each side until the spellings line up.
                newStr = newVocabulary.getVocabulary();
                rawStr = rawVocabulary.getVocabulary();
                index = newIndex;
                while (!newStr.equals(rawStr)) {
                    if (rawStr.contains(newStr)) {
                        // Old phrase contains the new one: extend the new side.
                        // Several new tokens per old token => this is a split,
                        // not a merge.
                        newStr += vocabularys.get(newIndex + 1).getVocabulary();
                        newIndex++;
                        mergeFlag = false;
                    } else if (newStr.contains(rawStr)) {
                        // New phrase contains the old one: extend the old side.
                        rawStr += rawVocabularys.get(rawIndex + 1).getVocabulary();
                        rawIndex++;
                        chara = rawVocabulary.getCharacteristic();
                    } else {
                        // Neither contains the other: inconsistent input, bail out.
                        LOGGER.error("Sentence update error 1, sid=[{}], rawStr=[{}], newStr=[{}]",
                                new Object[]{sentence.getId(), rawStr, newStr});
                        break;
                    }
                }
                // After the loop both strings must match; log if they do not.
                if (!newStr.equals(rawStr)) {
                    LOGGER.error("Sentence update error 2, sid=[{}], rawStr=[{}], newStr=[{}]",
                            new Object[]{sentence.getId(), rawStr, newStr});
                } else {
                    if (mergeFlag) {
                        // Merge: old tokens combined into one new token. Add the
                        // combined word to the user dictionary (or the time
                        // dictionary when the last extended old token was "mqt").
                        AnalysisDictionary dic = new AnalysisDictionary();
                        dic.setVocabulary(newStr);
                        dic.setDictionary(newStr + '\t' + newVocabulary.getCharacteristic() + '\t' + "1000");
                        dic.setDeleteFlag(AnalysisDictionaryConstant.ANALYSIS_DICTIONARY_DELETE_FLAG_N);
                        if ("mqt".equals(chara)) {
                            dic.setType(AnalysisDictionaryConstant.ANALYSIS_DICTIONARY_TYPE_TIME);
                        } else {
                            dic.setType(getUserLibraryDictType(sentence.getType(), secondType));
                        }
                        dic.setModifierId(user.getUserId());
                        dic.setModifierName(user.getRealName());
                        // Does the word already exist in the user/time dictionary?
                        searchParam = new HashMap<>();
                        if ("mqt".equals(chara)) {
                            searchParam.put("type", AnalysisDictionaryConstant.ANALYSIS_DICTIONARY_TYPE_TIME);
                        } else {
                            searchParam.put("type", getUserLibraryDictType(sentence.getType(), secondType));
                        }
                        searchParam.put("keyword", newStr);
                        count = analysisDictionDao.selectAnalysisDictionaryCount(searchParam);
                        if (count > 0) {
                            // Conflict: park the pending state in the session and
                            // ask the user to confirm before committing.
                            Map<String, Object> map = new HashMap<>();
                            map.put("mergeFlag", mergeFlag);
                            map.put("searchParam", searchParam);
                            map.put("newStr", newStr);
                            map.put("dic", dic);
                            map.put("vocabularys", vocabularys);
                            map.put("sentence", sentence);
                            req.getSession().setAttribute("objMap", map);
                            // NOTE(review): setDesc mutates the shared
                            // DYNAMIC_DESC_EXCEPTION instance — not thread-safe;
                            // concurrent edits could see each other's message.
                            ReturnCode returnCode = ReturnCode.DYNAMIC_DESC_EXCEPTION;
                            returnCode.setDesc(newStr + "  分词规则与用户词典冲突，是否确定提交？");
                            return new ReturnData(returnCode, map);
                        }
                        // Remove a matching entry from the outside dictionary.
                        if (analysisDictionDao.selectOutsideDictionaryCountByVocabulary(newStr) > 0) {
                            analysisDictionDao.deleteOutsideDictionaryByVocabulary(newStr);
                        }
                        // If the word exists in the ambiguity dictionary, store the
                        // whole-sentence segmentation there instead.
                        searchParam.put("type", getAmbiguityDictType(sentence.getType(), secondType));
                        count = analysisDictionDao.selectAnalysisDictionaryCount(searchParam);
                        if (count > 0) {
                            dic.setVocabulary(sentence.getSentence());
                            dic.setDictionary(this.getSentenceDictionary(sentence, vocabularys));
                            dic.setType(getAmbiguityDictType(sentence.getType(), secondType));
                        }
                        // Finally insert the dictionary row.
                        LOGGER.info("插入词典，dic：{}", dic.toString());
                        analysisDictionDao.insertAnalysisDictionary(dic);
                    } else {
                        // Split: one old token broken into several new tokens.
                        // Build "phrase \t token \t pos …" for the ambiguity dictionary.
                        StringBuilder builder = new StringBuilder(newStr);
                        for (; index <= newIndex; index++) {
                            builder.append('\t').append(vocabularys.get(index).getVocabulary()).append('\t')
                                    .append(vocabularys.get(index).getCharacteristic());
                        }
                        AnalysisDictionary dic = new AnalysisDictionary();
                        dic.setVocabulary(newStr);
                        dic.setDictionary(builder.toString());
                        dic.setDeleteFlag(AnalysisDictionaryConstant.ANALYSIS_DICTIONARY_DELETE_FLAG_N);
                        dic.setType(getAmbiguityDictType(sentence.getType(), secondType));
                        dic.setModifierId(user.getUserId());
                        dic.setModifierName(user.getRealName());
                        // Does the phrase already exist in the ambiguity dictionary?
                        searchParam = new HashMap<>();
                        searchParam.put("type", getAmbiguityDictType(sentence.getType(), secondType));
                        searchParam.put("keyword", newStr);
                        count = analysisDictionDao.selectAnalysisDictionaryCount(searchParam);
                        if (count > 0) {
                            // Conflict: same confirmation flow as the merge branch.
                            Map<String, Object> map = new HashMap<>();
                            map.put("mergeFlag", mergeFlag);
                            map.put("searchParam", searchParam);
                            map.put("newStr", newStr);
                            map.put("dic", dic);
                            map.put("vocabularys", vocabularys);
                            map.put("sentence", sentence);
                            req.getSession().setAttribute("objMap", map);
                            // NOTE(review): shared-instance mutation, see merge branch.
                            ReturnCode returnCode = ReturnCode.DYNAMIC_DESC_EXCEPTION;
                            returnCode.setDesc(newStr + " 分词规则与歧义词典冲突，是否确定提交？");
                            return new ReturnData(returnCode, map);
                        }
                        // Remove a matching entry from the outside dictionary.
                        if (analysisDictionDao.selectOutsideDictionaryCountByVocabulary(newStr) > 0) {
                            analysisDictionDao.deleteOutsideDictionaryByVocabulary(newStr);
                        }
                        // If the phrase exists in the user dictionary, store the
                        // whole-sentence segmentation in the ambiguity dictionary.
                        searchParam.put("type", getUserLibraryDictType(sentence.getType(), secondType));
                        count = analysisDictionDao.selectAnalysisDictionaryCount(searchParam);
                        if (count > 0) {
                            dic.setVocabulary(sentence.getSentence());
                            dic.setDictionary(this.getSentenceDictionary(sentence, vocabularys));
                            dic.setType(getAmbiguityDictType(sentence.getType(), secondType));
                        }
                        // Finally insert the dictionary row.
                        LOGGER.info("插入词典，dic：{}", dic.toString());
                        analysisDictionDao.insertAnalysisDictionary(dic);
                    }
                }

                // Reset the per-divergence scratch state.
                newStr = "";
                rawStr = "";
                mergeFlag = true;
            }
            newIndex++;
            rawIndex++;
        }
        LOGGER.info("维护词典完成");
        return ReturnUtil.success();
    }


    /**
     * Builds the dictionary line for a whole sentence:
     * {@code sentence \t token \t pos \t token \t pos …}.
     *
     * @param sentence    the sentence
     * @param vocabularys its word segments, in order
     * @return tab-separated dictionary entry
     */
    private String getSentenceDictionary(AnalysisSentence sentence, List<AnalysisVocabulary> vocabularys) {
        StringBuilder line = new StringBuilder();
        line.append(sentence.getSentence());
        for (AnalysisVocabulary segment : vocabularys) {
            line.append('\t').append(segment.getVocabulary());
            line.append('\t').append(segment.getCharacteristic());
        }
        return line.toString();
    }

    /**
     * Resolves the user-library dictionary type for a sentence type: ZS maps
     * to the fixed user-library type, anything else is looked up by its
     * secondType.
     *
     * @param type       sentence type
     * @param secondType sub-type key for the non-ZS lookup
     * @return dictionary type constant
     */
    private int getUserLibraryDictType(int type, String secondType) {
        return type == AnalysisSentenceConstant.TYPE_ZS
                ? AnalysisDictionaryConstant.ANALYSIS_DICTIONARY_TYPE_USERLIBRARY
                : AnalysisDictionaryConstant.ANALYSISDictionaryUserLibrarySecondTypeMap.get(secondType);
    }

    /**
     * Resolves the ambiguity dictionary type for a sentence type: ZS maps to
     * the fixed ambiguity type, anything else is looked up by its secondType.
     *
     * @param type       sentence type
     * @param secondType sub-type key for the non-ZS lookup
     * @return dictionary type constant
     */
    private int getAmbiguityDictType(int type, String secondType) {
        return type == AnalysisSentenceConstant.TYPE_ZS
                ? AnalysisDictionaryConstant.ANALYSIS_DICTIONARY_TYPE_AMBIGUITY
                : AnalysisDictionaryConstant.ANALYSISDictionaryAmbiguitySecondTypeMap.get(secondType);
    }

    /**
     * Checks whether two segmentations are identical (same length, and each
     * pair of segments matches on vocabulary plus trimmed positions,
     * part-of-speech and property), i.e. whether an edit changed anything.
     *
     * @param rawVocabularys segmentation before the edit
     * @param vocabularys    segmentation after the edit
     * @return true when nothing changed
     */
    private boolean checkVocabularySame(List<AnalysisVocabulary> rawVocabularys, List<AnalysisVocabulary> vocabularys) {
        if (rawVocabularys.size() != vocabularys.size()) {
            return false;
        }
        for (int i = 0; i < rawVocabularys.size(); i++) {
            AnalysisVocabulary raw = rawVocabularys.get(i);
            AnalysisVocabulary cur = vocabularys.get(i);
            // StringUtils.trim is null-safe (returns null for null input), so a
            // null field no longer throws the NPE the previous field.trim()
            // calls did — StringUtils.equals then compares nulls safely.
            if (!StringUtils.equals(StringUtils.trim(raw.getPosition2()), StringUtils.trim(cur.getPosition2()))
                    || !StringUtils.equals(StringUtils.trim(raw.getPosition1()), StringUtils.trim(cur.getPosition1()))
                    || !StringUtils.equals(StringUtils.trim(raw.getCharacteristic()), StringUtils.trim(cur.getCharacteristic()))
                    || !StringUtils.equals(raw.getVocabulary(), cur.getVocabulary())
                    || !StringUtils.equals(StringUtils.trim(raw.getProperty()), StringUtils.trim(cur.getProperty()))) {
                return false;
            }
        }
        return true;
    }

    /**
     * Checks that the submitted tokens, concatenated in order, exactly
     * rebuild the original sentence text.
     *
     * @param vocabularyStrs tokens in sentence order; null/empty fails
     * @param sentence       the original sentence text
     * @return true when the concatenation equals the sentence
     */
    public boolean checkVocabulary(String[] vocabularyStrs, String sentence) {
        if (vocabularyStrs == null || vocabularyStrs.length == 0) {
            return false;
        }
        // StringBuilder instead of the needlessly synchronized StringBuffer;
        // return the comparison directly instead of if/return true/return false.
        StringBuilder joined = new StringBuilder();
        for (String vocabulary : vocabularyStrs) {
            joined.append(vocabulary);
        }
        return joined.toString().equals(sentence);
    }

    /**
     * Builds {@link AnalysisVocabulary} entities from the parallel request
     * arrays (token, part-of-speech, property, positions), all attached to
     * the given sentence id and type.
     *
     * @param vocabularyStrs tokens in sentence order
     * @param characterStrs  per-token part-of-speech values
     * @param propertyStrs   per-token properties
     * @param position1Strs  per-token primary positions
     * @param position2Strs  per-token secondary positions
     * @param sid            owning sentence id
     * @param type           sentence type
     * @return one entity per token, in order
     */
    private List<AnalysisVocabulary> getVocabularysByParam(String[] vocabularyStrs, String[] characterStrs,
                                                           String[] propertyStrs, String[] position1Strs, String[] position2Strs, Long sid, int type) {
        List<AnalysisVocabulary> vocabularys = new ArrayList<>(vocabularyStrs.length);
        for (int i = 0; i < vocabularyStrs.length; i++) {
            String word = vocabularyStrs[i];
            AnalysisVocabulary vocabulary = new AnalysisVocabulary();
            vocabulary.setSid(sid);
            vocabulary.setVocabulary(word);
            vocabulary.setCharacteristic(characterStrs[i]);
            vocabulary.setProperty(propertyStrs[i]);
            vocabulary.setPosition1(position1Strs[i]);
            vocabulary.setPosition2(position2Strs[i]);
            vocabulary.setType(type);
            // Guard against an empty token: substring(0, 1) would throw
            // StringIndexOutOfBoundsException on "".
            vocabulary.setInitial(word.isEmpty() ? "" : word.substring(0, 1));

            vocabularys.add(vocabulary);
        }

        return vocabularys;
    }

    /**
     * Restores a deleted chief-complaint (ZS) sentence by delegating to the
     * generic {@link #recoverAnalysis(String, String, String)}.
     *
     * @param idStr sentence id as string
     * @return success result from the generic recovery
     */
    public ReturnData recoverZs(String idStr) {
        return recoverAnalysis(String.valueOf(AnalysisSentenceConstant.TYPE_ZS), null, idStr);
    }

    /**
     * Rebuilds the given Sphinx index on a background thread so the caller
     * does not block on the shell script.
     *
     * @param index name of the index to rotate
     */
    public void asynchronousUpdateSphinxIndex(String index) {
        Runnable job = new ExecuteUpdateSphinx(index);
        new Thread(job).start();
    }

    /** Runnable wrapper that rebuilds a single Sphinx index in the background. */
    class ExecuteUpdateSphinx implements Runnable {

        /** Name of the index to rotate; fixed at construction. */
        private final String index;

        ExecuteUpdateSphinx(String index) {
            this.index = index;
        }

        @Override
        public void run() {
            updateSphinxIndex(index);
        }
    }

    // Path of the shell script that rotates (rebuilds) a Sphinx index;
    // invoked by updateSphinxIndex below.
    @Value("${sphinx.index.rotate}")
    private String rotateIndexScript;

    /**
     * Rebuilds ("rotates") the given Sphinx search index by running the
     * configured shell script with the index name as its argument.
     *
     * @param index name of the index to rotate
     */
    public void updateSphinxIndex(String index) {
        LOGGER.info("搜索引擎：更新索引[{}]开始", index);
        String[] cmds = new String[]{rotateIndexScript, index};
        try {
            BusinessUtil.excuteShell(cmds);
        } catch (IOException e) {
            // Log at ERROR with the stack trace instead of printStackTrace(),
            // and return so the "completed" message below is not logged for a
            // failed rotation.
            LOGGER.error("搜索引擎异常：更新索引[{}]失败", index, e);
            return;
        }
        LOGGER.info("搜索引擎：更新索引[{}]完成", index);
    }

    /**
     * Restores a previously deleted sentence: moves it (and its word
     * segments) back from the deleted tables under a new id, then re-inserts
     * it into the cached Redis result lists whose key pattern matches its
     * type and label status.
     *
     * @param typeStr    sentence type (ZS / XBS / CT constant) as string
     * @param secondType sub-type; only used in the key pattern for XBS sentences
     * @param idStr      deleted-table sentence id as string
     * @return success result
     */
    public ReturnData recoverAnalysis(String typeStr, String secondType, String idStr) {
        Long id = Long.parseLong(idStr);
        int type = Integer.parseInt(typeStr);
        AnalysisSentence analysisSentence = analysisSentenceDao.retrieveDelObjectById(id);
        Map<String, Object> param = new HashMap<>();
        param.put("sid", id);
        List<AnalysisVocabulary> vocabularys = analysisVocabularyDao.retrieveDelList(param);
        // Move the sentence and its segments back into the active tables;
        // the recovery assigns a new sentence id.
        long newId = analysisSentenceDao.recoverAnalysis(id);
        analysisSentenceDao.recoverAnalysisVocab(id, newId);

        LOGGER.info("更新redis开始");
        // Pick the cached-list key pattern matching this sentence's type and
        // label status (1 = labeled, 0 = unlabeled), mirroring delete().
        String queryStr = "";
        if (type == AnalysisSentenceConstant.TYPE_CT && analysisSentence.getLabelStatus() == 1) {
            queryStr = RedisConstant.KEY_PATTERN_MEDICAL_LABELED;

        } else if (type == AnalysisSentenceConstant.TYPE_XBS && analysisSentence.getLabelStatus() == 1) {
            queryStr = RedisConstant.KEY_PATTERN_HPI_LABELED_NO_SECOND_TYPE + (char) 1 + secondType + (char) 1 + "*";
        } else if (type == AnalysisSentenceConstant.TYPE_ZS && analysisSentence.getLabelStatus() == 1) {
            queryStr = RedisConstant.KEY_PATTERN_COMPLAIN_LABELED;

        } else if (type == AnalysisSentenceConstant.TYPE_CT && analysisSentence.getLabelStatus() == 0) {
            queryStr = RedisConstant.KEY_PATTERN_MEDICAL_UNLABELED;

        } else if (type == AnalysisSentenceConstant.TYPE_XBS && analysisSentence.getLabelStatus() == 0) {
            queryStr = RedisConstant.KEY_PATTERN_HPI_UNLABELED_NO_SECOND_TYPE + (char) 1 + secondType + (char) 1 + "*";
        } else if (type == AnalysisSentenceConstant.TYPE_ZS && analysisSentence.getLabelStatus() == 0) {
            queryStr = RedisConstant.KEY_PATTERN_COMPLAIN_UNLABELED;
        }
        // Carry the new id into the cached copies.
        analysisSentence.setId(newId);
        addAnalySentenceceInRedis(analysisSentence, queryStr, vocabularys);

        LOGGER.info("更新redis完成");
        return ReturnUtil.success();
    }

    /**
     * Re-inserts a sentence into every cached Redis result list whose key
     * matches the given pattern, but only into lists whose search conditions
     * the sentence's segments still satisfy.
     *
     * @param analysisSentence sentence to (re)insert
     * @param keyPattern       Redis key pattern selecting the cached lists
     * @param vocabularys      the sentence's word segments, used to re-check conditions
     */
    private void addAnalySentenceceInRedis(AnalysisSentence analysisSentence, String keyPattern,
                                           List<AnalysisVocabulary> vocabularys) {
        ValueOperations<String, List<AnalysisSentence>> valueops = redisTemplate.opsForValue();
        // Refresh every cached list whose key matches the pattern.
        for (String key : redisTemplate.keys(keyPattern)) {
            List<AnalysisSentence> cached = valueops.get(key);
            // Only re-add the sentence when it still satisfies the search
            // conditions encoded in this particular key.
            boolean matches = canSelect(getSearchParamsByKey(key, analysisSentence.getType()), vocabularys);
            List<AnalysisSentence> refreshed = addElementsInList(cached, analysisSentence, matches);
            // Overwrite the cached list with the refreshed one.
            valueops.set(key, refreshed);
        }
    }

    /**
     * Removes any stale copies of the sentence from the list (matching by id)
     * and, when {@code flag} is true, re-inserts the fresh element at the head.
     *
     * @param list    cached sentence list, mutated in place
     * @param element sentence to (re)insert
     * @param flag    whether the sentence still belongs in this list
     * @return the same list instance
     */
    private List<AnalysisSentence> addElementsInList(List<AnalysisSentence> list, AnalysisSentence element,
                                                     boolean flag) {
        Long id = element.getId();
        // Drop every existing entry carrying the same id.
        for (Iterator<AnalysisSentence> it = list.iterator(); it.hasNext(); ) {
            if (id.equals(it.next().getId())) {
                it.remove();
            }
        }
        // Prepend the fresh element when it still matches this list's conditions.
        if (flag) {
            list.add(0, element);
        }
        return list;
    }

    /**
     * Reverse-parses a redis cache key back into the list of search parameters that
     * produced it.
     * <p>
     * Keys are built from fields joined by the control character U+0001: a fixed
     * prefix (2 fields, or 3 for XBS-type sentences), followed by repeating groups
     * of 7 fields — one group per search parameter — and 2 trailing fields
     * (presumably sort column/direction; the key builder is not visible here —
     * TODO confirm).
     *
     * @param key  the redis key to decode
     * @param type the sentence type, which determines the prefix length
     * @return the decoded search parameters, or {@code null} when the key carries
     *         no parameter groups (i.e. it was an unfiltered query)
     */
    private List<AnalysisSearchParam> getSearchParamsByKey(String key, int type) {
        // NOTE(review): lastIndexOf(1) looks for the U+0001 separator char, so this
        // strips the final field when the key does not end with "desc" — presumably
        // normalizing the sort-direction suffix; verify against the key builder.
        if (!key.endsWith("desc")) {
            key = key.substring(0, key.lastIndexOf(1));
        }
        // Fields are joined with U+0001
        String[] s = key.split(String.valueOf((char) 1));
        int limit = 3;
        int i = 2;
        // XBS-type keys carry one extra prefix field
        if (AnalysisSentenceConstant.TYPE_XBS == type) {
            limit = 4;
            i = 3;
        }
        // Prefix + trailing fields only: the key encodes no search parameters
        if (s.length <= limit) {
            return null;
        }
        List<AnalysisSearchParam> result = new ArrayList<>();
        AnalysisSearchParam param = null;
        // Consume 7 fields per parameter, stopping before the 2 trailing fields.
        // NOTE(review): assumes (s.length - 2 - i) is a multiple of 7; a malformed
        // key would throw ArrayIndexOutOfBoundsException mid-group — TODO confirm
        // keys are always well-formed.
        for (; i < s.length - 2; ) {
            param = new AnalysisSearchParam();
            param.setId(Long.valueOf(s[i++]));
            param.setVocabulary(s[i++]);
            param.setProperty(s[i++]);
            param.setPosition1(s[i++]);
            param.setPosition2(s[i++]);
            param.setOppositePropertyFlag(s[i++]);
            param.setOppositePosition1Flag(s[i++]);
            result.add(param);
        }
        return result;
    }

    /**
     * Checks whether a sentence's vocabulary entries satisfy every search condition.
     * <p>
     * A condition is satisfied when at least one vocabulary entry has the same word
     * and passes all of the condition's property/position checks. A word-matching
     * entry that fails any individual check short-circuits the whole method to
     * {@code false}. The "opposite" flags invert the property/position1 comparisons
     * ("0" = must match, "1" = must NOT match).
     *
     * @param searchParams the search conditions; {@code null} means "no filter" and
     *                     always passes
     * @param vocabularys  the sentence's vocabulary entries; {@code null} fails any
     *                     non-null condition list
     * @return {@code true} when every condition is satisfied, {@code false} otherwise
     */
    private boolean canSelect(List<AnalysisSearchParam> searchParams, List<AnalysisVocabulary> vocabularys) {
        if (searchParams == null) {
            return true;
        }
        if (vocabularys == null) {
            return false;
        }
        for (AnalysisSearchParam searchParam : searchParams) {
            boolean matched = false; // becomes true once some vocabulary entry satisfies this condition
            for (AnalysisVocabulary vocabulary : vocabularys) {
                // Only entries with the same word are checked against this condition
                if (!searchParam.getVocabulary().equals(vocabulary.getVocabulary())) {
                    continue;
                }
                if (StringUtils.isNotEmpty(searchParam.getProperty())) {
                    boolean sameProperty = searchParam.getProperty().equals(vocabulary.getProperty());
                    // Forward search: property must match; reverse search: property must differ
                    if ("0".equals(searchParam.getOppositePropertyFlag()) && !sameProperty) {
                        return false;
                    }
                    if ("1".equals(searchParam.getOppositePropertyFlag()) && sameProperty) {
                        return false;
                    }
                }
                if (StringUtils.isNotEmpty(searchParam.getPosition1())) {
                    boolean samePosition1 = searchParam.getPosition1().equals(vocabulary.getPosition1());
                    // Forward search: position1 must match; reverse search: position1 must differ
                    if ("0".equals(searchParam.getOppositePosition1Flag()) && !samePosition1) {
                        return false;
                    }
                    if ("1".equals(searchParam.getOppositePosition1Flag()) && samePosition1) {
                        return false;
                    }
                }
                // position2 has no opposite flag: non-empty means it must match exactly
                if (StringUtils.isNotEmpty(searchParam.getPosition2())
                        && !searchParam.getPosition2().equals(vocabulary.getPosition2())) {
                    return false;
                }
                matched = true;
            }
            // No vocabulary entry satisfied this condition
            if (!matched) {
                return false;
            }
        }
        return true;

    }

    /**
     * Moves a deleted sentence (and its vocabulary entries, per the DAO contract)
     * into the "deleted" table for the given id.
     * <p>
     * NOTE(review): only {@code insertDel} is visible here — whether the original
     * rows are also removed is handled by the DAO/SQL layer; confirm there.
     *
     * @param id id of the sentence to archive as deleted
     */
    private void removeAnalysisToDel(Long id) {
        analysisSentenceDao.insertDel(id);
    }

    /**
     * Removes a deleted sentence from every cached result list in redis whose key
     * matches {@code keyPattern}.
     *
     * @param id         id of the sentence to purge from the cached lists
     * @param keyPattern redis key pattern selecting the cached result lists to update
     */
    public void removeAnalySentenceceOfRedis(Long id, String keyPattern) {
        ValueOperations<String, List<AnalysisSentence>> valueops = redisTemplate.opsForValue(); // redis数据操作对象
        Set<String> keys = redisTemplate.keys(keyPattern);
        // keys() may return null (older Spring Data Redis) or an empty set — nothing to purge
        if (keys == null || keys.isEmpty()) {
            return;
        }
        for (String key : keys) {
            List<AnalysisSentence> data = valueops.get(key);
            // Entry may have expired/been evicted between keys() and get(); skip instead of NPE-ing
            if (data == null) {
                continue;
            }
            // Drop all occurrences of the sentence from the cached list
            List<AnalysisSentence> resultList = removeElementsById(data, id);
            // Overwrite the cached list with the purged one
            valueops.set(key, resultList);
        }
    }

    /**
     * Removes every sentence with the given id from the list, in place.
     *
     * @param list the cached sentence list to purge; tolerated as {@code null}
     *             (returned unchanged) since redis lookups can yield no value
     * @param id   id of the sentence(s) to remove
     * @return the same (possibly mutated) list instance, or {@code null} if
     *         {@code null} was passed in
     */
    private List<AnalysisSentence> removeElementsById(List<AnalysisSentence> list, Long id) {
        // Defensive: a cache miss hands the caller null; don't NPE on it
        if (list == null || list.isEmpty()) {
            return list;
        }
        Iterator<AnalysisSentence> iterator = list.iterator();
        while (iterator.hasNext()) {
            // iterator.remove() is the only safe way to delete while iterating
            if (id.equals(iterator.next().getId())) {
                iterator.remove();
            }
        }
        return list;
    }
}


