package com.huiquan.xbs.service;

import com.huiquan.analysis.constants.AnalysisSentenceConstant;
import com.huiquan.analysis.constants.RedisConstant;
import com.huiquan.analysis.dao.AnalysisSentenceDao;
import com.huiquan.analysis.dao.AnalysisVocabularyDao;
import com.huiquan.analysis.domain.AnalysisSearchParam;
import com.huiquan.analysis.domain.AnalysisSentence;
import com.huiquan.analysis.domain.AnalysisVocabulary;
import com.huiquan.analysis.service.SimilarityAlgo;
import com.huiquan.analysis.utils.ListUtil;
import com.huiquan.foundation.util.BusinessUtil;
import com.huiquan.framework.base.BaseService;
import com.huiquan.framework.base.ReturnData;
import com.huiquan.framework.utils.DateUtils;
import com.huiquan.framework.utils.GetListUtil;
import com.huiquan.framework.utils.ReturnUtil;
import com.huiquan.sphinx.SphinxClient;
import com.huiquan.sphinx.SphinxException;
import com.huiquan.sphinx.SphinxMatch;
import com.huiquan.sphinx.SphinxResult;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.core.ValueOperations;
import org.springframework.stereotype.Service;
import org.springframework.web.servlet.ModelAndView;

import java.io.IOException;
import java.util.*;

@Service
public class XbsSentenceService extends BaseService {

    // DAO for sentence rows (including the "deleted" shadow tables).
    @Autowired
    private AnalysisSentenceDao analysisSentenceDao;
    // DAO for vocabulary (word-segmentation) rows attached to sentences.
    @Autowired
    private AnalysisVocabularyDao analysisVocabularyDao;

    // Redis cache of sentence search results, keyed by the key built in getSelectKey().
    @Autowired
    private RedisTemplate<String, List<AnalysisSentence>> redisTemplate;

    // Redis for plain values, e.g. the ",id1,id2," delete-filter string (see delete()).
    @Autowired
    private RedisTemplate<String, Object> redisStrTemplate;

    // Sphinx search daemon host, injected from configuration.
    @Value("${sphinx.host}")
    private String sphinxHost;


    /**
     * Builds the model data for the chief-complaint labelling page: the
     * sentence text, its segmented terms and the label option maps.
     *
     * @param idStr sentence id as a decimal string
     * @return model map consumed by the labelling view
     */
    public Map<String, Object> zsPreEdit(String idStr) {
        final Long sentenceId = Long.parseLong(idStr);
        AnalysisSentence sentence = analysisSentenceDao.retrieveObjectById(sentenceId);
        List<AnalysisVocabulary> terms = analysisVocabularyDao.retriveListBySid(sentenceId);

        // Start from the predefined option sets, then append any value that
        // appears on the segmented terms but is missing from those sets.
        Map<String, String> characterMap = new LinkedHashMap<>(
                AnalysisSentenceConstant.getPropertyMapByType(AnalysisSentenceConstant.CHARACTER_LABEL));
        Map<String, String> propertyMap = new LinkedHashMap<>(
                AnalysisSentenceConstant.getPropertyMapByTypeAndSecondType(AnalysisSentenceConstant.TYPE_XBS, ""));
        for (AnalysisVocabulary term : terms) {
            if (!characterMap.containsKey(term.getCharacteristic())) {
                characterMap.put(term.getCharacteristic(), term.getCharacteristic());
            }
            if (!propertyMap.containsKey(term.getProperty())) {
                propertyMap.put(term.getProperty(), term.getProperty());
            }
        }

        Map<String, Object> model = new HashMap<>();
        model.put("id", sentenceId);
        model.put("sentence", sentence.getSentence());
        model.put("vocabularys", terms);
        model.put("characterMap", characterMap);
        model.put("propertyMap", propertyMap);
        model.put("position1List", AnalysisSentenceConstant.getPositionByType(AnalysisSentenceConstant.POSITION1_LABEL));
        model.put("position2List", AnalysisSentenceConstant.getPositionByType(AnalysisSentenceConstant.POSITION2_LABEL));
        return model;
    }

    /**
     * Returns one page (20 rows) of unlabeled HPI sentences plus the auxiliary
     * option maps needed by the list page.
     *
     * @param startStr     1-based page number as string; null or empty defaults to page 1
     * @param searchParams optional vocabulary search conditions
     * @param secondType   HPI second-level category
     * @return model map produced by GetListUtil.getReturnMap2 plus option maps
     * @throws SphinxException when the Sphinx search backend fails
     */
    public Map<String, Object> sentenceUnlabelList(String startStr, List<AnalysisSearchParam> searchParams, String secondType) throws SphinxException {
        // Guard against empty as well as null, which previously threw NumberFormatException.
        int start = StringUtils.isEmpty(startStr) ? 1 : Integer.parseInt(startStr);
        int labelStatus = 0; // 0 = not yet labeled

        // Build the filter for the cached/db/Sphinx lookup.
        Map<String, Object> param = new HashMap<>();
        param.put("type", AnalysisSentenceConstant.TYPE_XBS);
        param.put("labelStatus", labelStatus);
        if (searchParams != null && !searchParams.isEmpty()) {
            param.put("searchParams", searchParams);
        }
        param.put("orderStr", "gmt_modified desc");
        param.put("secondType", secondType);

        // Full result list (redis-cached, falls back to DB / Sphinx).
        List<AnalysisSentence> list = selectSentenceList(param, AnalysisSentenceConstant.TYPE_XBS, labelStatus, secondType);
        int totalSize = list.size();

        // Paging: 20 rows per page.
        Map<String, Integer> pageNo = GetListUtil.getPageNoMap(totalSize, start);
        int endPage = pageNo.get("endPage");
        start = pageNo.get("start");
        int startIndex = pageNo.get("startIndex");
        if (startIndex < totalSize) {
            list = list.subList(startIndex, Math.min(startIndex + 20, totalSize));
        }

        Map<String, Object> map = GetListUtil.getReturnMap2(totalSize, start, startIndex, endPage, list, null);
        // The leading blank entry renders as the "no selection" option.
        LinkedHashMap<String, String> propertyMap = new LinkedHashMap<>();
        propertyMap.put(" ", " ");
        propertyMap.putAll(AnalysisSentenceConstant.getPropertyMapByTypeAndSecondType(AnalysisSentenceConstant.TYPE_XBS, secondType));
        map.put("propertyMap", propertyMap);
        map.put("position1List", AnalysisSentenceConstant.getPositionByType(AnalysisSentenceConstant.POSITION1_LABEL));
        map.put("position2List", AnalysisSentenceConstant.getPositionByType(AnalysisSentenceConstant.POSITION2_LABEL));
        map.put("secondTypeMap", AnalysisSentenceConstant.secondTypeMap);
        map.put("errorMsg", getSourceErrMsg());
        return map;
    }

    /**
     * Returns one page (20 rows) of labeled HPI sentences plus the auxiliary
     * option maps needed by the list page.
     *
     * @param startStr     1-based page number as string; null defaults to page 1
     * @param searchParams optional vocabulary search conditions
     * @param secondType   HPI second-level category
     * @return model map produced by GetListUtil.getReturnMap2 plus option maps
     * @throws SphinxException when the Sphinx search backend fails
     */
    public Map<String, Object> sentenceLabeledList(String startStr, List<AnalysisSearchParam> searchParams, String secondType) throws SphinxException {
        int page = startStr == null ? 1 : Integer.parseInt(startStr);
        final int labelStatus = 1; // 1 = already labeled

        // Filter for the cached/db/Sphinx lookup.
        Map<String, Object> filter = new HashMap<>();
        filter.put("type", AnalysisSentenceConstant.TYPE_XBS);
        filter.put("labelStatus", labelStatus);
        if (searchParams != null && !searchParams.isEmpty()) {
            filter.put("searchParams", searchParams);
        }
        filter.put("orderStr", "gmt_modified desc");
        filter.put("secondType", secondType);

        List<AnalysisSentence> sentences = selectSentenceList(filter, AnalysisSentenceConstant.TYPE_XBS, labelStatus, secondType);
        int totalSize = sentences.size();

        // Paging: 20 rows per page.
        Map<String, Integer> pageNo = GetListUtil.getPageNoMap(totalSize, page);
        int endPage = pageNo.get("endPage");
        page = pageNo.get("start");
        int startIndex = pageNo.get("startIndex");
        if (startIndex < totalSize) {
            int endIndex = startIndex + 20 > totalSize ? totalSize : startIndex + 20;
            sentences = sentences.subList(startIndex, endIndex);
        }

        Map<String, Object> model = GetListUtil.getReturnMap2(totalSize, page, startIndex, endPage, sentences, null);
        // The leading blank entry renders as the "no selection" option.
        LinkedHashMap<String, String> propertyMap = new LinkedHashMap<>();
        propertyMap.put(" ", " ");
        propertyMap.putAll(AnalysisSentenceConstant.getPropertyMapByTypeAndSecondType(AnalysisSentenceConstant.TYPE_XBS, secondType));
        model.put("propertyMap", propertyMap);
        model.put("position1List", AnalysisSentenceConstant.getPositionByType(AnalysisSentenceConstant.POSITION1_LABEL));
        model.put("position2List", AnalysisSentenceConstant.getPositionByType(AnalysisSentenceConstant.POSITION2_LABEL));
        model.put("secondTypeMap", AnalysisSentenceConstant.secondTypeMap);
        model.put("errorMsg", getSourceErrMsg());
        return model;
    }

    /**
     * Deletes an HPI sentence by delegating to the generic {@link #delete}.
     *
     * @param secondType HPI second-level category
     * @param idStr      sentence id as string
     * @return success result
     */
    public ReturnData deleteXbs(String secondType, String idStr) {
        String typeStr = String.valueOf(AnalysisSentenceConstant.TYPE_XBS);
        return delete(typeStr, secondType, idStr);
    }

    /**
     * Deletes a sentence: moves it into the "deleted" tables, evicts it from
     * the cached redis result lists matching its type/label status, and records
     * its id in the delete-filter key so search results can exclude it.
     *
     * @param typeStr    sentence type (TYPE_CT / TYPE_XBS / TYPE_ZS) as string
     * @param secondType HPI second-level category (used only for TYPE_XBS key patterns)
     * @param idStr      sentence id as string
     * @return success result
     */
    public ReturnData delete(String typeStr, String secondType, String idStr) {
        Long id = Long.parseLong(idStr);
        int type = Integer.parseInt(typeStr);

        // Remove from the analysis tables, moving the rows into the deleted tables.
        removeAnalysisToDel(id);

        AnalysisSentence analysisSentence = analysisSentenceDao.retrieveDelObjectById(id);

        LOGGER.info("更新redis开始");
        // Pick the redis key pattern matching the sentence's type and label status,
        // then remove the sentence from every cached list under that pattern.
        String queryStr = "";
        if (type == AnalysisSentenceConstant.TYPE_CT && analysisSentence.getLabelStatus() == 1) {
            queryStr = RedisConstant.KEY_PATTERN_MEDICAL_LABELED;

        } else if (type == AnalysisSentenceConstant.TYPE_XBS && analysisSentence.getLabelStatus() == 1) {
            // HPI key patterns embed the second type between \u0001 separators.
            queryStr = RedisConstant.KEY_PATTERN_HPI_LABELED_NO_SECOND_TYPE + (char) 1 + secondType + (char) 1 + "*";
        } else if (type == AnalysisSentenceConstant.TYPE_ZS && analysisSentence.getLabelStatus() == 1) {
            queryStr = RedisConstant.KEY_PATTERN_COMPLAIN_LABELED;

        } else if (type == AnalysisSentenceConstant.TYPE_CT && analysisSentence.getLabelStatus() == 0) {
            queryStr = RedisConstant.KEY_PATTERN_MEDICAL_UNLABELED;

        } else if (type == AnalysisSentenceConstant.TYPE_XBS && analysisSentence.getLabelStatus() == 0) {
            queryStr = RedisConstant.KEY_PATTERN_HPI_UNLABELED_NO_SECOND_TYPE + (char) 1 + secondType + (char) 1 + "*";
        } else if (type == AnalysisSentenceConstant.TYPE_ZS && analysisSentence.getLabelStatus() == 0) {
            queryStr = RedisConstant.KEY_PATTERN_COMPLAIN_UNLABELED;
        }
        removeAnalySentenceceOfRedis(id, queryStr);
        // Deleting from the search engine is too costly, so the deleted ids are
        // appended to a ",id1,id2," record here and filtered out at display time.
        ValueOperations<String, Object> valueops = redisStrTemplate.opsForValue();
        Object deleteId = valueops.get(RedisConstant.KEY_ANALYSIS_DELETE);
        if (deleteId != null) {
            valueops.set(RedisConstant.KEY_ANALYSIS_DELETE, deleteId + idStr + ",");
        } else {
            valueops.set(RedisConstant.KEY_ANALYSIS_DELETE, "," + idStr + ",");
        }
        LOGGER.info("更新redis完成");
        return ReturnUtil.success();
    }

    /**
     * Returns one page of deleted HPI sentences, optionally filtered by sentence text.
     *
     * @param startStr        1-based page number as string; null defaults to 1
     * @param sentence        search text; ";" "；" "," "，" act as wildcards unless exact
     * @param exactSearchFlag "1" for exact (tab-delimited) matching
     * @param secondType      HPI second-level category
     * @return model map for the deleted-sentence list page
     */
    public Map<String, Object> xbsDeleteList(String startStr, String sentence, String exactSearchFlag, String secondType) {
        int pageStart = startStr == null ? 1 : Integer.parseInt(startStr);

        Map<String, Object> filter = new HashMap<>();
        // Optional text filter.
        if (sentence != null && !sentence.isEmpty()) {
            if ("1".equals(exactSearchFlag)) {
                // Sentences store words tab-delimited, so this matches a whole word.
                filter.put("sentence", "%\t" + sentence + "\t%");
            } else {
                String fuzzy = sentence.replace(";", "%").replace("；", "%").replace(",", "%").replace("，", "%");
                filter.put("sentence", "%" + fuzzy + "%");
            }
        }
        filter.put("type", AnalysisSentenceConstant.TYPE_XBS);
        filter.put("secondType", secondType);
        int totalSize = analysisSentenceDao.retrieveDelSize(filter);

        // Paging.
        Map<String, Integer> pageNo = GetListUtil.getPageNoMap(totalSize, pageStart);
        int endPage = pageNo.get("endPage");
        pageStart = pageNo.get("start");
        int startIndex = pageNo.get("startIndex");

        List<AnalysisSentence> rows = new ArrayList<>();
        if (startIndex < totalSize) {
            filter.put("startIndex", startIndex);
            rows = analysisSentenceDao.retrieveDelList(filter);
        }

        Map<String, String> showSearch = new LinkedHashMap<>();
        showSearch.put("sentence", "句子");
        Map<String, String> searchMap = new HashMap<>();
        searchMap.put("sentence", sentence);

        Map<String, Object> model = GetListUtil.getReturnMap2(totalSize, pageStart, startIndex, endPage, rows, searchMap);
        model.put("showSearch", showSearch);
        model.put("showSearchValue", searchMap);
        model.put("secondTypeMap", AnalysisSentenceConstant.secondTypeMap);
        model.put("errorMsg", getSourceErrMsg());
        return model;
    }


    public ReturnData recoverXbs(String secondType, String idStr) {
        return recoverAnalysis(AnalysisSentenceConstant.TYPE_XBS + "", secondType, idStr);
    }

    /**
     * Triggers a Sphinx index rebuild on a background thread so the caller does
     * not block while the external index script runs.
     *
     * @param index name of the Sphinx index to rebuild
     */
    public void asynchronousUpdateSphinxIndex(String index) {
        new Thread(new ExecuteUpdateSphinx(index)).start();
    }

    /** Runnable wrapper that rebuilds one Sphinx index via {@link #updateSphinxIndex}. */
    class ExecuteUpdateSphinx implements Runnable {

        // Name of the index to rebuild; fixed at construction time.
        private final String index;

        ExecuteUpdateSphinx(String index) {
            this.index = index;
        }

        @Override
        public void run() {
            updateSphinxIndex(index);
        }
    }

    // Path of the shell script (from configuration) that rotates a Sphinx index.
    @Value("${sphinx.index.rotate}")
    private String rotateIndexScript;

    /**
     * Rebuilds a Sphinx index by running the configured rotate script.
     * Failures are logged (with stack trace) and swallowed; the method never throws.
     *
     * @param index name of the Sphinx index to rebuild
     */
    public void updateSphinxIndex(String index) {
        LOGGER.info("搜索引擎：更新索引[{}]开始", index);
        String[] cmds = new String[]{rotateIndexScript, index};
        try {
            BusinessUtil.excuteShell(cmds);
        } catch (IOException e) {
            // Log the exception with its stack trace (instead of printStackTrace)
            // and return so the "completed" message is not logged for a failed run.
            LOGGER.error("搜索引擎异常：更新索引[{}]失败", index, e);
            return;
        }
        LOGGER.info("搜索引擎：更新索引[{}]完成", index);
    }

    /**
     * Restores a previously deleted sentence: moves it (and its vocabulary rows)
     * back out of the deleted tables under a new id, then re-inserts it into the
     * cached redis lists matching its type/label status.
     *
     * @param typeStr    sentence type (TYPE_CT / TYPE_XBS / TYPE_ZS) as string
     * @param secondType HPI second-level category (used only for TYPE_XBS key patterns)
     * @param idStr      id of the deleted sentence as string
     * @return success result
     */
    public ReturnData recoverAnalysis(String typeStr, String secondType, String idStr) {
        Long id = Long.parseLong(idStr);
        int type = Integer.parseInt(typeStr);
        AnalysisSentence analysisSentence = analysisSentenceDao.retrieveDelObjectById(id);
        Map<String, Object> param = new HashMap<>();
        param.put("sid", id);
        List<AnalysisVocabulary> vocabularys = analysisVocabularyDao.retrieveDelList(param);
        // Move the rows back from the deleted tables; the DAO returns the new
        // sentence id, and the vocabulary rows are re-linked to it.
        long newId = analysisSentenceDao.recoverAnalysis(id);
        analysisSentenceDao.recoverAnalysisVocab(id, newId);

        LOGGER.info("更新redis开始");
        // Pick the redis key pattern matching the sentence's type and label status.
        String queryStr = "";
        if (type == AnalysisSentenceConstant.TYPE_CT && analysisSentence.getLabelStatus() == 1) {
            queryStr = RedisConstant.KEY_PATTERN_MEDICAL_LABELED;

        } else if (type == AnalysisSentenceConstant.TYPE_XBS && analysisSentence.getLabelStatus() == 1) {
            // HPI key patterns embed the second type between \u0001 separators.
            queryStr = RedisConstant.KEY_PATTERN_HPI_LABELED_NO_SECOND_TYPE + (char) 1 + secondType + (char) 1 + "*";
        } else if (type == AnalysisSentenceConstant.TYPE_ZS && analysisSentence.getLabelStatus() == 1) {
            queryStr = RedisConstant.KEY_PATTERN_COMPLAIN_LABELED;

        } else if (type == AnalysisSentenceConstant.TYPE_CT && analysisSentence.getLabelStatus() == 0) {
            queryStr = RedisConstant.KEY_PATTERN_MEDICAL_UNLABELED;

        } else if (type == AnalysisSentenceConstant.TYPE_XBS && analysisSentence.getLabelStatus() == 0) {
            queryStr = RedisConstant.KEY_PATTERN_HPI_UNLABELED_NO_SECOND_TYPE + (char) 1 + secondType + (char) 1 + "*";
        } else if (type == AnalysisSentenceConstant.TYPE_ZS && analysisSentence.getLabelStatus() == 0) {
            queryStr = RedisConstant.KEY_PATTERN_COMPLAIN_UNLABELED;
        }
        // Switch to the new id before re-inserting into the cached lists.
        analysisSentence.setId(newId);
        addAnalySentenceceInRedis(analysisSentence, queryStr, vocabularys);

        LOGGER.info("更新redis完成");
        return ReturnUtil.success();
    }

    /**
     * Re-inserts a (restored) sentence into every cached redis list whose key
     * matches the given pattern, provided the sentence's terms still satisfy
     * the search conditions encoded in each key.
     *
     * @param analysisSentence sentence to insert (already carrying its new id)
     * @param keyPattern       redis key pattern selecting the affected lists
     * @param vocabularys      the sentence's vocabulary terms, used to re-check the conditions
     */
    private void addAnalySentenceceInRedis(AnalysisSentence analysisSentence, String keyPattern,
                                           List<AnalysisVocabulary> vocabularys) {
        ValueOperations<String, List<AnalysisSentence>> valueops = redisTemplate.opsForValue();
        for (String key : redisTemplate.keys(keyPattern)) {
            List<AnalysisSentence> cached = valueops.get(key);
            // Only prepend the sentence when it still matches the search encoded in this key.
            boolean stillMatches = canSelect(getSearchParamsByKey(key, analysisSentence.getType()), vocabularys);
            // Write the de-duplicated (and possibly extended) list back over the old one.
            valueops.set(key, addElementsInList(cached, analysisSentence, stillMatches));
        }
    }

    /**
     * De-duplicates and optionally prepends a sentence: any existing entry with
     * the same id is removed from the list, and the element itself is inserted
     * at the head when {@code flag} is true.
     *
     * @param list    sentence list (mutated in place)
     * @param element sentence to add
     * @param flag    whether to prepend the element after de-duplication
     * @return the same (mutated) list
     */
    private List<AnalysisSentence> addElementsInList(List<AnalysisSentence> list, AnalysisSentence element,
                                                     boolean flag) {
        Long targetId = element.getId();
        // Purge any stale copy of this sentence before (possibly) re-inserting it.
        for (Iterator<AnalysisSentence> it = list.iterator(); it.hasNext(); ) {
            if (targetId.equals(it.next().getId())) {
                it.remove();
            }
        }
        if (flag) {
            list.add(0, element);
        }
        return list;
    }

    /**
     * Reverse-parses a cache key produced by getSelectKey back into its list of
     * search conditions.
     *
     * @param key  cache key whose fields are separated by the \u0001 character
     * @param type sentence type; TYPE_XBS keys carry one extra second-type field
     * @return the decoded conditions, or null when the key encodes an empty search
     */
    private List<AnalysisSearchParam> getSearchParamsByKey(String key, int type) {
        // Keys built by getSelectKey end with the order clause ("gmt_modified desc");
        // otherwise trim back to the last \u0001 separator (lastIndexOf(1) searches
        // for char code 1). NOTE(review): unclear which callers pass such keys — confirm.
        if (!key.endsWith("desc")) {
            key = key.substring(0, key.lastIndexOf(1));
        }
        String[] s = key.split(String.valueOf((char) 1));
        // Non-condition segments: type + labelStatus + orderStr, plus secondType for HPI.
        int limit = 3;
        int i = 2;
        if (AnalysisSentenceConstant.TYPE_XBS == type) {
            limit = 4;
            i = 3;
        }
        // Only the fixed segments present => empty search, no conditions to decode.
        if (s.length <= limit) {
            return null;
        }
        List<AnalysisSearchParam> result = new ArrayList<>();
        AnalysisSearchParam param = null;
        // Each condition occupies 7 consecutive segments, mirroring getSelectKey's order.
        for (; i < s.length - 2; ) {
            param = new AnalysisSearchParam();
            param.setId(Long.valueOf(s[i++]));
            param.setVocabulary(s[i++]);
            param.setProperty(s[i++]);
            param.setPosition1(s[i++]);
            param.setPosition2(s[i++]);
            param.setOppositePropertyFlag(s[i++]);
            param.setOppositePosition1Flag(s[i++]);
            result.add(param);
        }
        return result;
    }

    /**
     * Checks whether a sentence's vocabulary terms satisfy every search condition.
     *
     * @param searchParams search conditions; null means "no conditions" and always matches
     * @param vocabularys  the sentence's vocabulary terms; null never matches
     * @return true when every condition is satisfied, false otherwise
     */
    private boolean canSelect(List<AnalysisSearchParam> searchParams, List<AnalysisVocabulary> vocabularys) {
        if (searchParams == null) {
            return true;
        }
        if (vocabularys == null) {
            return false;
        }
        for (AnalysisSearchParam condition : searchParams) {
            boolean matched = false; // did any term satisfy this condition?
            for (AnalysisVocabulary term : vocabularys) {
                // Only terms with the same vocabulary text are candidates.
                if (!condition.getVocabulary().equals(term.getVocabulary())) {
                    continue;
                }
                // Property check: flag "0" = forward search (must equal),
                // flag "1" = reverse search (must differ).
                if (StringUtils.isNotEmpty(condition.getProperty())) {
                    boolean sameProperty = condition.getProperty().equals(term.getProperty());
                    if ("0".equals(condition.getOppositePropertyFlag()) && !sameProperty) {
                        return false;
                    }
                    if ("1".equals(condition.getOppositePropertyFlag()) && sameProperty) {
                        return false;
                    }
                }
                // Position1 check with the same forward/reverse semantics.
                if (StringUtils.isNotEmpty(condition.getPosition1())) {
                    boolean samePosition1 = condition.getPosition1().equals(term.getPosition1());
                    if ("0".equals(condition.getOppositePosition1Flag()) && !samePosition1) {
                        return false;
                    }
                    if ("1".equals(condition.getOppositePosition1Flag()) && samePosition1) {
                        return false;
                    }
                }
                // Position2 supports only exact matching.
                if (StringUtils.isNotEmpty(condition.getPosition2())
                        && !condition.getPosition2().equals(term.getPosition2())) {
                    return false;
                }
                matched = true;
            }
            // A condition with no matching term fails the whole check.
            if (!matched) {
                return false;
            }
        }
        return true;
    }

    /**
     * Moves the sentence into the deleted tables via the DAO.
     *
     * @param id sentence id
     */
    private void removeAnalysisToDel(Long id) {
        analysisSentenceDao.insertDel(id);
    }

    /**
     * Removes the sentence with the given id from every cached redis list whose
     * key matches the pattern.
     *
     * @param id         sentence id to evict
     * @param keyPattern redis key pattern selecting the affected lists
     */
    public void removeAnalySentenceceOfRedis(Long id, String keyPattern) {
        ValueOperations<String, List<AnalysisSentence>> valueops = redisTemplate.opsForValue();
        for (String key : redisTemplate.keys(keyPattern)) {
            List<AnalysisSentence> cached = valueops.get(key);
            // Write back the list with every entry carrying this id removed.
            valueops.set(key, removeElementsById(cached, id));
        }
    }

    /**
     * Strips every entry with the given id from the list (in place).
     *
     * @param list sentence list to mutate
     * @param id   id of the entries to remove
     * @return the same (mutated) list
     */
    private List<AnalysisSentence> removeElementsById(List<AnalysisSentence> list, Long id) {
        for (Iterator<AnalysisSentence> it = list.iterator(); it.hasNext(); ) {
            if (id.equals(it.next().getId())) {
                it.remove();
            }
        }
        return list;
    }

    /**
     * Returns the sentence list for the given filter, serving from the redis
     * cache when possible and falling back to the database (empty searches) or
     * the Sphinx search engine (vocabulary searches). Fresh results are written
     * back into the cache.
     *
     * @param param       filter map (type, labelStatus, orderStr, optional searchParams)
     * @param type        sentence type constant
     * @param labelStatus 0 = unlabeled, 1 = labeled
     * @param secondType  HPI second-level category
     * @return the sentence list
     * @throws SphinxException when the Sphinx query fails
     */
    private List<AnalysisSentence> selectSentenceList(Map<String, Object> param, int type, int labelStatus,
                                                      String secondType) throws SphinxException {
        ValueOperations<String, List<AnalysisSentence>> valueops = redisTemplate.opsForValue();
        // Cache key encoding type / labelStatus / secondType / conditions / order.
        String key = getSelectKey(param, secondType);
        List<AnalysisSentence> list = valueops.get(key);
        // Serve from cache unless it is missing, empty, or holds exactly 4000
        // entries (presumably a truncated/stale marker — TODO confirm the 4000).
        if (list != null && list.size() > 0 && list.size() != 4000) {
            return list;
        }
        if (RedisConstant.NULL_SEARCH_COMPLAIN_UNLABELED.equals(key)) {
            // Empty search over unlabeled chief complaints.
            list = analysisSentenceDao.retrieveZsListNullSearch();
            if (ListUtil.isNotEmpty(list)) {
                // Thin out near-duplicate sentences via their property signatures.
                List<HashMap<String, Object>> mapList = analysisSentenceDao
                        .selectVocabularySidPropertyMapper(getIdListStr(list));
                list = removeSameItem(list, mapList);
            }
        } else if (key.startsWith(RedisConstant.NULL_SEARCH_HPI_UNLABELED_NO_SECOND_TYPE) && key.split("\01").length == 4) {
            // Empty search over unlabeled HPI sentences.
            list = analysisSentenceDao.retrieveXbsListNullSearchUnlabeled(secondType);
            if (ListUtil.isNotEmpty(list)) {
                List<HashMap<String, Object>> mapList = analysisSentenceDao
                        .selectVocabularySidPropertyMapper(getIdListStr(list));
                list = removeSameItem(list, mapList);
            }
        } else if (key.startsWith(RedisConstant.NULL_SEARCH_HPI_LABELED_NO_SECOND_TYPE) && key.split("\01").length == 4) {
            // Empty search over labeled HPI sentences.
            list = analysisSentenceDao.retrieveXbsListNullSearchLabeled(secondType);
        } else if (RedisConstant.NULL_SEARCH_MEDICAL_UNLABELED.equals(key)
                || RedisConstant.NULL_SEARCH_MEDICAL_LABELED.equals(key)
                || RedisConstant.NULL_SEARCH_COMPLAIN_LABELED.equals(key)) {
            // Remaining empty-search cases go straight to the database.
            list = analysisSentenceDao.retrieveListNoLimit(param);
        } else {
            // Vocabulary search: query the Sphinx search engine.
            @SuppressWarnings("unchecked")
            List<AnalysisSearchParam> searchParams = (List<AnalysisSearchParam>) param.get("searchParams");
            list = getListFromSphinx(searchParams, type, labelStatus, secondType);

            // Sphinx results still contain deleted sentences; filter them using the
            // ",id1,id2," delete record maintained by delete().
            ValueOperations<String, Object> valueopsOther = redisStrTemplate.opsForValue();
            Object deleteId = valueopsOther.get(RedisConstant.KEY_ANALYSIS_DELETE);
            if (deleteId != null) {
                String deletedIds = deleteId.toString();
                Iterator<AnalysisSentence> iter = list.iterator();
                while (iter.hasNext()) {
                    AnalysisSentence item = iter.next();
                    // Match the id together with its delimiters; a bare contains(id)
                    // would wrongly treat id 12 as deleted when id 123 was deleted.
                    if (deletedIds.contains("," + item.getId() + ",")) {
                        iter.remove();
                    }
                }
            }
        }
        // Cache the fresh result for subsequent pages of the same search.
        addAnalysisSentenceListInRedis(key, list);
        return list;
    }

    /**
     * Builds the redis cache key for a sentence search: type, label status,
     * optional HPI second type, each search condition's seven fields, and the
     * order clause, all joined by the \u0001 separator.
     *
     * @param map        filter map (type, labelStatus, orderStr, optional searchParams)
     * @param secondType HPI second-level category
     * @return the cache key
     */
    private String getSelectKey(Map<String, Object> map, String secondType) {
        final char sep = (char) 1;
        StringBuilder key = new StringBuilder();
        int type = (int) map.get("type");
        key.append(type);
        key.append(sep).append(map.get("labelStatus"));
        // HPI keys additionally carry the second-level category.
        if (type == AnalysisSentenceConstant.TYPE_XBS && StringUtils.isNotEmpty(secondType)) {
            key.append(sep).append(secondType);
        }
        @SuppressWarnings("unchecked")
        List<AnalysisSearchParam> searchParams = (List<AnalysisSearchParam>) map.get("searchParams");
        if (searchParams != null) {
            // Seven fields per condition — getSearchParamsByKey relies on this order.
            for (AnalysisSearchParam condition : searchParams) {
                key.append(sep).append(condition.getId())
                        .append(sep).append(condition.getVocabulary())
                        .append(sep).append(condition.getProperty())
                        .append(sep).append(condition.getPosition1())
                        .append(sep).append(condition.getPosition2())
                        .append(sep).append(condition.getOppositePropertyFlag())
                        .append(sep).append(condition.getOppositePosition1Flag());
            }
        }
        key.append(sep).append(map.get("orderStr"));
        return key.toString();
    }

    /**
     * Joins the sentence ids as a comma-separated list of quoted values,
     * e.g. {@code '1','2','3'}, suitable for an SQL IN clause.
     *
     * @param list sentence list
     * @return the joined id string (empty for an empty list)
     */
    private String getIdListStr(List<AnalysisSentence> list) {
        StringBuilder sb = new StringBuilder();
        for (AnalysisSentence sentence : list) {
            if (sb.length() > 0) {
                sb.append(",");
            }
            sb.append("'").append(sentence.getId()).append("'");
        }
        return sb.toString();
    }

    /**
     * Thins out near-duplicate sentences: a sentence is dropped when its
     * property signature is more than 0.93 similar to any of the up-to-10
     * immediately preceding sentences of the input list.
     *
     * @param list    candidate sentences, in display order
     * @param mapList rows mapping sentence id ("sid") to its property signature ("property")
     * @return the thinned list (the input itself when it has 0 or 1 elements)
     */
    private List<AnalysisSentence> removeSameItem(List<AnalysisSentence> list, List<HashMap<String, Object>> mapList) {
        if (list == null || list.size() <= 1) {
            return list;
        }
        // sentence id -> property signature string
        HashMap<Long, String> propertyMap = new HashMap<>();
        for (HashMap<String, Object> map : mapList) {
            // NOTE(review): assumes "sid" is boxed as a Long — a different numeric
            // type would throw ClassCastException here; confirm against the mapper.
            propertyMap.put((long) map.get("sid"), String.valueOf(map.get("property")));
        }
        List<AnalysisSentence> result = new ArrayList<>();
        result.add(list.get(0));
        for (int i = 1; i < list.size(); i++) {
            boolean flag = true; // true = keep this sentence
            // Compare against up to 10 immediately preceding input sentences.
            for (int j = 1; j <= 10; j++) {
                if ((i - j) < 0) {
                    break;
                }
                // Sentences without a property signature cannot be compared — skip.
                if (StringUtils.isEmpty(propertyMap.get(list.get(i).getId()))) {
                    LOGGER.info("ID为:" + list.get(i).getId() + "的语句找不到对应的属性集合");
                    continue;
                }
                if (StringUtils.isEmpty(propertyMap.get(list.get(i - j).getId()))) {
                    LOGGER.info("ID为:" + list.get(i - j).getId() + "的语句找不到对应的属性集合");
                    continue;
                }
                double d = SimilarityAlgo.getSimilarity(propertyMap.get(list.get(i).getId()),
                        propertyMap.get(list.get(i - j).getId()));
                // Above the 0.93 similarity threshold the sentence is a near-duplicate.
                if (d > 0.93D) {
                    flag = false;
                }
            }
            if (flag) {
                result.add(list.get(i));
            }
        }
        return result;
    }

    /**
     * Builds a warning message listing sentences whose source/category
     * assignment conflicts, as reported by the vocabulary DAO.
     *
     * @return the warning message, or "" when there are no conflicts
     */
    private String getSourceErrMsg() {
        List<Map<String, String>> sourceErrList = analysisVocabularyDao.retrieveSourceErrMsg();
        if (sourceErrList == null || sourceErrList.isEmpty()) {
            return "";
        }
        // Build with StringBuilder instead of String += concatenation in the loop.
        StringBuilder errMsg = new StringBuilder("分句分类有冲突：");
        for (Map<String, String> sourceErr : sourceErrList) {
            errMsg.append(sourceErr.get("sentence"))
                    .append("->")
                    .append(sourceErr.get("source"))
                    .append("；");
        }
        return errMsg.toString();
    }

    /**
     * Stores a search result list in redis under the given key, after evicting
     * all other cached search results sharing the key's 4-character prefix.
     * Empty-search entries are the exception and are never cleared.
     *
     * @param key  cache key produced by getSelectKey
     * @param list sentence list to cache
     */
    private void addAnalysisSentenceListInRedis(String key, List<AnalysisSentence> list) {
        ValueOperations<String, List<AnalysisSentence>> valueops = redisTemplate.opsForValue();
        // All cached searches of the same type/label status share this prefix.
        Set<String> staleKeys = redisTemplate.keys(key.substring(0, 4) + "*");
        // Exempt the empty-search entries from eviction.
        staleKeys.remove(RedisConstant.NULL_SEARCH_COMPLAIN_LABELED);
        staleKeys.remove(RedisConstant.NULL_SEARCH_MEDICAL_LABELED);
        staleKeys.remove(RedisConstant.NULL_SEARCH_COMPLAIN_UNLABELED);
        staleKeys.remove(RedisConstant.NULL_SEARCH_MEDICAL_UNLABELED);
        for (Iterator<String> it = staleKeys.iterator(); it.hasNext(); ) {
            String candidate = it.next();
            // HPI empty-search keys carry a second-type suffix, so match by prefix.
            if (candidate.startsWith(RedisConstant.NULL_SEARCH_HPI_UNLABELED_NO_SECOND_TYPE)
                    || candidate.startsWith(RedisConstant.NULL_SEARCH_HPI_LABELED_NO_SECOND_TYPE)) {
                it.remove();
            }
        }
        redisTemplate.delete(staleKeys);
        // Store the fresh result.
        valueops.set(key, list);
    }

    /**
     * Fetches sentences from the sphinx search engine matching ALL of the given
     * search conditions (per-condition result sets are intersected), restricted
     * to the given label status and — for XBS sentences — the second-level type.
     *
     * @param searchParams per-vocabulary search conditions; one sphinx query each
     * @param type         sentence type; selects index "vocabulary{type}" and its delta
     * @param labelStatus  required label_status attribute value
     * @param secondType   second-level type, applied only when type == TYPE_XBS
     * @return at most 5000 sentences, newest (gmt_modified) first; empty list
     *         when there are no conditions or any single query has no matches
     *         (the intersection would then be empty)
     * @throws SphinxException on sphinx client errors
     */
    private List<AnalysisSentence> getListFromSphinx(List<AnalysisSearchParam> searchParams, int type, int labelStatus,
                                                     String secondType) throws SphinxException {
        List<AnalysisSentence> result = new ArrayList<>();
        if (searchParams == null || searchParams.size() == 0) {
            return result;
        }
        // NOTE(review): the port is hard-coded while the host is injected via
        // @Value — consider making the port configurable as well.
        int sphinxPort = 9312;
        SphinxClient sphinxClient = new SphinxClient(sphinxHost, sphinxPort);
        // Sort matches by the gmt_modified attribute, descending (newest first).
        sphinxClient.SetSortMode(SphinxClient.SPH_SORT_ATTR_DESC, "gmt_modified");
        // Result window: offset 0, up to 200000 rows, 200000 max matches kept.
        sphinxClient.SetLimits(0, 200000, 200000);
        // Attribute filter; the boolean is the exclude flag: true means !=, false means =.
        sphinxClient.SetFilter("label_status", labelStatus, false);
        AnalysisSentence sentence = null;
        SphinxResult sphinxResult = null;
        String queryStr = null;
        String indexStr = null;
        LinkedHashMap<Long, String[]> resultMap = null;
        // Run one query per search condition and intersect the results.
        for (int i = 0; i < searchParams.size(); i++) {
            queryStr = getQueryVocabularyStr(searchParams.get(i));
            if (type == AnalysisSentenceConstant.TYPE_XBS) {
                // XBS queries are additionally restricted to the second-level type.
                queryStr = queryStr + "&(@second_type=\"^" + secondType + "$\")";
            }
            // Query the main index plus its delta (incremental) index.
            indexStr = "vocabulary" + type + "|vocabulary" + type + "_delta";
            LOGGER.info("查询语句为" + queryStr);
            LOGGER.info("索引语句为" + indexStr);
            sphinxResult = sphinxClient.Query(queryStr, indexStr);
            if (sphinxResult == null || sphinxResult.getMatches() == null || sphinxResult.getMatches().length == 0) {
                // One empty result set empties the whole intersection — stop early.
                return result;
            }
            if (i == 0) {
                resultMap = getResultMap(sphinxResult.getMatches());
            } else {
                // Keep only the ids present in every result set so far.
                resultMap = ListUtil.getRepeatResult(resultMap, getResultMap(sphinxResult.getMatches()));
            }
        }
        if (resultMap == null || resultMap.size() == 0) {
            return result;
        }
        // Materialize the surviving matches; value[0] is the sentence text and
        // value[1] the gmt_modified unix timestamp (see getResultMap).
        for (Map.Entry<Long, String[]> entry : resultMap.entrySet()) {
            sentence = new AnalysisSentence();
            sentence.setId(entry.getKey());
            sentence.setSentence(entry.getValue()[0]);
            sentence.setGmtModified(DateUtils.getDateStrFromUnixTimestamp(entry.getValue()[1]));
            result.add(sentence);
            // Cap the returned list at 5000 entries.
            if (result.size() >= 5000) {
                return result;
            }
        }
        return result;
    }

    /**
     * Builds the sphinx extended-query string for a single search condition:
     * an exact vocabulary match, optionally AND-ed with property, position1
     * and position2 sub-expressions. When the "opposite" flag of a field is
     * set, the query matches every known value EXCEPT the selected one.
     *
     * @param searchParam a single search condition; may be null
     * @return the query string, or null when {@code searchParam} is null
     */
    private String getQueryVocabularyStr(AnalysisSearchParam searchParam) {
        if (searchParam == null) {
            return null;
        }
        StringBuilder sb = new StringBuilder();
        sb.append("(@vocabulary=\"^").append(searchParam.getVocabulary()).append("$\")");
        if (StringUtils.isNotEmpty(searchParam.getProperty())) {
            // BUGFIX: was "0".endsWith(flag), which is also true for an empty
            // flag and throws NPE for a null flag; equals() tests the intended
            // flag == "0" condition and is null-safe.
            if ("0".equals(searchParam.getOppositePropertyFlag())) {
                sb.append("&(@property=").append(searchParam.getProperty()).append(")");
            } else {
                // Opposite match: every known property except the selected one.
                List<String> propertyList = new ArrayList<String>(Arrays
                        .asList(new String[]{"O", "O_AD", "P", "R", "R_AD", "S_AD", "S_I", "DI", "DE", "IN", "AD"}));
                propertyList.remove(searchParam.getProperty());
                sb.append("&(@property=").append(String.join("|", propertyList)).append(")");
            }
        }
        if (StringUtils.isNotEmpty(searchParam.getPosition1())) {
            // BUGFIX: same endsWith -> equals correction as above.
            if ("0".equals(searchParam.getOppositePosition1Flag())) {
                sb.append("&(@position1=").append(searchParam.getPosition1()).append(")");
            } else {
                // Opposite match: every known position1 except the selected one.
                List<String> position1List = new ArrayList<String>(
                        Arrays.asList(new String[]{"N", "B", "M", "E", "S", "BB", "MM", "EE"}));
                position1List.remove(searchParam.getPosition1());
                sb.append("&(@position1=").append(String.join("|", position1List)).append(")");
            }
        }
        if (StringUtils.isNotEmpty(searchParam.getPosition2())) {
            sb.append("&(@position2=^").append(searchParam.getPosition2()).append("$)");
        }
        return sb.toString();
    }

    /**
     * Collapses sphinx matches into a de-duplicated, insertion-ordered map
     * keyed by sentence id (attribute 0); the value pair holds the sentence
     * text (attribute 1) and the gmt_modified timestamp (attribute 7). A
     * duplicate id is overwritten by the later match.
     *
     * @param matches sphinx match array
     * @return id -> {sentence, gmtModified} in match order
     */
    private LinkedHashMap<Long, String[]> getResultMap(SphinxMatch[] matches) {
        LinkedHashMap<Long, String[]> resultMap = new LinkedHashMap<>();
        for (SphinxMatch match : matches) {
            Long sentenceId = Long.valueOf(String.valueOf(match.getAttrValues().get(0)));
            String sentenceText = String.valueOf(match.getAttrValues().get(1));
            String modifiedTime = String.valueOf(match.getAttrValues().get(7));
            resultMap.put(sentenceId, new String[]{sentenceText, modifiedTime});
        }
        return resultMap;
    }

    /**
     * Marks a sentence as un-taggable: moves it into the cannot-tag table,
     * evicts it from the matching unlabeled redis caches and records its id
     * in the delete-filter key so search-engine results can be filtered out
     * at display time.
     *
     * @param secondType second-level type used to build the redis key pattern
     * @param idStr      sentence id as a string
     * @return success result
     */
    public ReturnData canNotTag(String secondType, String idStr) {
        Long sentenceId = Long.parseLong(idStr);

        // Move the row out of the analysis table into the deleted table,
        // marked as "cannot tag".
        analysisSentenceDao.insertCannotTag(sentenceId);

        LOGGER.info("更新redis开始");
        // Evict the sentence from the unlabeled redis caches matching this
        // second type; (char) 1 is the key-segment separator.
        String keyPattern = RedisConstant.KEY_PATTERN_HPI_UNLABELED_NO_SECOND_TYPE
                + (char) 1 + secondType + (char) 1 + "*";
        removeAnalySentenceceOfRedis(sentenceId, keyPattern);

        // Deleting from the search engine is cumbersome, so results are
        // filtered at display time: append this id to the filter key.
        ValueOperations<String, Object> filterOps = redisStrTemplate.opsForValue();
        Object filtered = filterOps.get(RedisConstant.KEY_ANALYSIS_DELETE);
        String updated = (filtered == null) ? "," + idStr + "," : filtered + idStr + ",";
        filterOps.set(RedisConstant.KEY_ANALYSIS_DELETE, updated);
        LOGGER.info("更新redis完成");
        return ReturnUtil.success();
    }

    /**
     * Builds the paged list view of sentences previously marked as
     * un-taggable, optionally filtered by sentence text.
     *
     * @param startStr        requested page number as string; null means page 1
     * @param sentence        search text; in fuzzy mode the separators ; ； , ，
     *                        are treated as wildcards
     * @param exactSearchFlag "1" for exact (tab-delimited) match, anything else fuzzy
     * @param secondType      second-level type filter
     * @return model map with the page slice, paging info and search echo values
     */
    public Map<String, Object> canNotTagList(String startStr, String sentence, String exactSearchFlag, String secondType) {
        int start = startStr == null ? 1 : Integer.parseInt(startStr);

        // Assemble the query filter.
        Map<String, Object> queryParam = new HashMap<>();
        // Optional sentence-text filter.
        if (StringUtils.isNotEmpty(sentence)) {
            if ("1".equals(exactSearchFlag)) {
                // Exact match on a tab-delimited token.
                queryParam.put("sentence", "%\t" + sentence + "\t%");
            } else {
                // Fuzzy match: separators become SQL wildcards.
                String fuzzy = sentence.replace(";", "%").replace("；", "%").replace(",", "%").replace("，", "%");
                queryParam.put("sentence", "%" + fuzzy + "%");
            }
        }
        queryParam.put("type", 2);
        queryParam.put("secondType", secondType);

        // Load the full cannot-tag list (paging is done in memory below).
        List<AnalysisSentence> list = analysisSentenceDao.retrieveXbsCanNotTag(queryParam);

        int totalSize = (list == null) ? 0 : list.size();

        // Resolve paging boundaries.
        Map<String, Integer> pageNo = GetListUtil.getPageNoMap(totalSize, start);
        int endPage = pageNo.get("endPage");
        start = pageNo.get("start");
        int startIndex = pageNo.get("startIndex");

        // Slice out the 20-row page.
        if (startIndex < totalSize) {
            list = list.subList(startIndex, Math.min(startIndex + 20, totalSize));
        }

        Map<String, String> showSearch = new LinkedHashMap<>();
        showSearch.put("sentence", "句子");

        Map<String, String> searchMap = new HashMap<>();
        searchMap.put("sentence", sentence);

        Map<String, Object> model = GetListUtil.getReturnMap2(totalSize, start, startIndex, endPage, list, searchMap);
        model.put("showSearch", showSearch);
        model.put("showSearchValue", searchMap);
        model.put("secondTypeMap", AnalysisSentenceConstant.secondTypeMap);

        return model;
    }

    /**
     * Recovers a previously un-taggable sentence so it can be tagged again:
     * moves it (and its vocabulary rows) from the deleted table back into the
     * analysis tables under a NEW id, then pushes it back into the unlabeled
     * redis caches.
     *
     * @param secondType second-level type used to build the redis key pattern
     * @param idStr      id of the sentence in the deleted table, as a string
     * @return success result
     */
    public ReturnData canTag(String secondType, String idStr) {
        Long id = Long.parseLong(idStr);

        // Move the row back from the deleted table into the analysis table
        // (it receives a new id), then restore its vocabulary rows.
        long newId = analysisSentenceDao.recoverCanTag(id);
        analysisSentenceDao.recoverCanTagVocab(id, newId);

        AnalysisSentence analysisSentence = analysisSentenceDao.retrieveObjectById(newId);// reload the recovered sentence by its new id

        List<AnalysisVocabulary> vocabularys = analysisVocabularyDao.retrieveVocabById(newId);// reload its vocabulary rows by the new id

        LOGGER.info("更新redis开始");
        // Re-add the recovered sentence to the matching unlabeled redis
        // caches; (char) 1 is the key-segment separator.
        String queryStr = RedisConstant.KEY_PATTERN_HPI_UNLABELED_NO_SECOND_TYPE + (char) 1 + secondType + (char) 1 + "*";

        addAnalySentenceceInRedis(analysisSentence, queryStr, vocabularys);
        // Search-engine deletion is cumbersome, so results are filtered at
        // display time via this redis key.
        // NOTE(review): this APPENDS idStr to the delete filter, identical to
        // canNotTag(). Since this method RECOVERS a sentence, removing the id
        // from the filter looks like the intent — though the recovered row has
        // a new id, so keeping the stale old id filtered may be deliberate.
        // Confirm with the display-time filtering code.
        ValueOperations<String, Object> valueops = redisStrTemplate.opsForValue();
        Object deleteId = valueops.get(RedisConstant.KEY_ANALYSIS_DELETE);
        if (deleteId != null) {
            valueops.set(RedisConstant.KEY_ANALYSIS_DELETE, deleteId + idStr + ",");
        } else {
            valueops.set(RedisConstant.KEY_ANALYSIS_DELETE, "," + idStr + ",");
        }
        LOGGER.info("更新redis完成");
        return ReturnUtil.success();
    }
}


