package com.huiquan.check_sentence.service;

import com.huiquan.analysis.constants.AnalysisSentenceConstant;
import com.huiquan.analysis.constants.RedisConstant;
import com.huiquan.analysis.dao.AnalysisSentenceDao;
import com.huiquan.analysis.dao.AnalysisVocabularyDao;
import com.huiquan.analysis.domain.*;
import com.huiquan.analysis.service.AnalysisSentenceService;
import com.huiquan.check_sentence.dao.CheckSentenceDao;
import com.huiquan.framework.base.BaseService;
import com.huiquan.framework.base.ReturnCode;
import com.huiquan.framework.base.ReturnData;
import com.huiquan.framework.utils.GetListUtil;
import com.huiquan.framework.utils.ReturnUtil;
import com.huiquan.sphinx.SphinxException;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.web.servlet.ModelAndView;

import javax.servlet.http.HttpServletRequest;
import java.util.*;

/**
 * @author lichao email:lichao@witspring.com
 * @Description:
 * @Date 2017/12/1 10:56
 * @since Ver V
 */
@Service
public class CheckSentenceService extends BaseService {

    /** Number of outsourced annotation slots the edit view always renders. */
    private static final int OUTSOURCING_SLOT_COUNT = 3;

    @Autowired
    private CheckSentenceDao checkSentenceDao;

    @Autowired
    private AnalysisSentenceDao analysisSentenceDao;

    @Autowired
    private AnalysisVocabularyDao analysisVocabularyDao;

    @Autowired
    private AnalysisSentenceService analysisSentenceService;

    /**
     * Builds the paged sentence-check list view.
     *
     * @param startStr    requested page number as a string; {@code null} defaults to page 1
     * @param labelStatus label-status filter passed through to the DAO query
     * @param secondType  second-type filter passed through to the DAO query
     * @return the "check_sentence/index" view populated with paging data and the sentence list
     */
    public ModelAndView list(String startStr, int labelStatus, String secondType) {
        int start = startStr == null ? 1 : Integer.parseInt(startStr);

        // Filter conditions for both the count and the list query.
        Map<String, Object> param = new HashMap<>();
        param.put("labelStatus", labelStatus);
        param.put("secondType", secondType);
        // NOTE(review): "gmt_modified1" looks like a typo for "gmt_modified" —
        // confirm against the mapper/table definition before changing.
        param.put("orderStr", "gmt_modified1 desc");

        // Total row count drives the pager.
        int totalSize = checkSentenceDao.selectCheckSentenceCount(param);

        // Derive paging values (clamped page number, end page, row offset).
        Map<String, Integer> pageNo = GetListUtil.getPageNoMap(totalSize, start);
        int endPage = pageNo.get("endPage");
        start = pageNo.get("start");
        int startIndex = pageNo.get("startIndex");

        // Only hit the database when the requested offset is inside the result set.
        List<AnalysisSentence> list = new ArrayList<>();
        if (startIndex < totalSize) {
            param.put("startIndex", startIndex);
            list = checkSentenceDao.selectCheckSentenceList(param);
        }

        Map<String, Object> map = GetListUtil.getReturnMap2(totalSize, start, startIndex, endPage, list, null);
        map.put("secondTypeMap", AnalysisSentenceConstant.secondTypeMap);
        return new ModelAndView("check_sentence/index", map);
    }

    /**
     * Builds the sentence edit view: the sentence, its tokenization, the selectable
     * attribute/characteristic maps, surrounding context (for XBS sentences), and up to
     * three outsourced annotations for side-by-side comparison.
     *
     * @param idStr sentence id as a string (must parse as a long)
     * @return the "check_sentence/sentence_edit" view
     */
    public ModelAndView preEdit(String idStr) {
        // Load the sentence and its tokenization by id.
        Long id = Long.parseLong(idStr);
        AnalysisSentence analysisSentence = analysisSentenceDao.retrieveObjectById(id);
        List<AnalysisVocabulary> vocabularys = analysisVocabularyDao.retriveListBySid(id);
        // Context row also carries the second-type classification (may be null).
        Map<String, String> context = analysisSentenceDao.selectContextBySid(id);

        // Build the characteristic and property option maps, then append any values
        // present in the tokenization that the constant maps do not already contain.
        Map<String, String> characterMap = new LinkedHashMap<>(
                AnalysisSentenceConstant.getPropertyMapByType(AnalysisSentenceConstant.CHARACTER_LABEL));
        Map<String, String> propertyMap = new LinkedHashMap<>(
                AnalysisSentenceConstant.getPropertyMapByTypeAndSecondType(AnalysisSentenceConstant.TYPE_XBS,
                        context == null ? "" : context.get("second_type")));

        for (AnalysisVocabulary term : vocabularys) {
            characterMap.putIfAbsent(term.getCharacteristic(), term.getCharacteristic());
            propertyMap.putIfAbsent(term.getProperty(), term.getProperty());
        }

        List<AnalysisVocabularySourceDto> vocabSources = analysisSentenceService.getVocabSourceDto(vocabularys);

        Map<String, Object> map = new HashMap<>();
        map.put("id", id);
        map.put("sentence", analysisSentence.getSentence());
        map.put("vocabularys", vocabSources);
        map.put("characterMap", characterMap);
        map.put("position1List", AnalysisSentenceConstant.getPositionByType(AnalysisSentenceConstant.POSITION1_LABEL));
        map.put("position2List", AnalysisSentenceConstant.getPositionByType(AnalysisSentenceConstant.POSITION2_LABEL));
        map.put("propertyMap", propertyMap);

        // For XBS (present-illness-history) sentences, expose the surrounding sentences.
        if (analysisSentence.getType() == AnalysisSentenceConstant.TYPE_XBS && context != null) {
            map.put("aboveSentence1", context.get("above_sentence1"));
            map.put("aboveSentence2", context.get("above_sentence2"));
            map.put("aboveSentence3", context.get("above_sentence3"));
            map.put("belowSentence1", context.get("below_sentence1"));
            map.put("belowSentence2", context.get("below_sentence2"));
            map.put("belowSentence3", context.get("below_sentence3"));

            map.put("secondTypeMap", AnalysisSentenceConstant.secondTypeMap);
        }

        // Load the outsourced annotators' tokenizations for this sentence.
        List<Long> osidList = checkSentenceDao.selectOsidList(id);
        List<List<AnalysisVocabulary>> ovocabularyLists = new ArrayList<>();
        for (Long osid : osidList) {
            ovocabularyLists.add(checkSentenceDao.selectOutsourcingVocabularyByOsid(osid));
        }
        // The view always renders three columns; pad with empty lists when fewer
        // than three outsourced annotations exist (previously an IndexOutOfBoundsException).
        while (ovocabularyLists.size() < OUTSOURCING_SLOT_COUNT) {
            ovocabularyLists.add(new ArrayList<>());
        }
        modifyOutsourcingVocabularyLists(ovocabularyLists);
        map.put("vocabularyList1", ovocabularyLists.get(0));
        map.put("vocabularyList2", ovocabularyLists.get(1));
        map.put("vocabularyList3", ovocabularyLists.get(2));

        return new ModelAndView("check_sentence/sentence_edit", map);
    }

    /**
     * Saves the checked annotation for a sentence. Delegates the actual save to
     * {@link AnalysisSentenceService#edit}, then marks the outsourced copies of the
     * sentence as checked, grades each outsourced token against the verified
     * tokenization, and triggers an asynchronous search-index refresh.
     *
     * @param typeStr        sentence type (also selects the sphinx delta index to refresh)
     * @param secondType     second-type classification
     * @param idStr          sentence id as a string (must parse as a long)
     * @param user           the operator performing the edit
     * @param vocabularyStrs token strings of the verified tokenization
     * @param characterStrs  characteristics, parallel to {@code vocabularyStrs}
     * @param propertyStrs   properties, parallel to {@code vocabularyStrs}
     * @param position1Strs  level-1 positions, parallel to {@code vocabularyStrs}
     * @param position2Strs  level-2 positions, parallel to {@code vocabularyStrs}
     * @param req            the current request (passed through to the delegate)
     * @param sourceStrs     source markers, parallel to {@code vocabularyStrs}
     * @return the delegate's result; returned unchanged on delegate failure
     * @throws Exception propagated from the delegate save
     */
    public ReturnData edit(String typeStr, String secondType, String idStr, User user, String[] vocabularyStrs,
                           String[] characterStrs, String[] propertyStrs, String[] position1Strs, String[] position2Strs,
                           HttpServletRequest req, String[] sourceStrs) throws Exception {
        ReturnData returnData = analysisSentenceService.edit(typeStr, secondType, idStr, user, vocabularyStrs,
                characterStrs, propertyStrs, position1Strs, position2Strs, req, sourceStrs);
        // If the delegate failed, surface its result unchanged.
        if (!ReturnCode.SUCCESS.getCode().equals(returnData.getReturnCode().getCode())) {
            return returnData;
        }
        Long sid = Long.parseLong(idStr);
        // Mark the corresponding outsourced sentence rows as checked (status 1).
        checkSentenceDao.updateOutsourcingSentenceStatus(sid, 1);

        // Grade each outsourced token against the verified tokenization and persist
        // the correctness flag on bas_analysis_outsourcing_vocabulary.
        List<AnalysisVocabulary> analysisVocabularys = analysisVocabularyDao.retriveListBySid(sid);
        List<Long> osidList = checkSentenceDao.selectOsidList(sid);
        for (Long osid : osidList) {
            List<AnalysisVocabulary> vocabularyList = checkSentenceDao.selectOutsourcingVocabularyByOsid(osid);
            Map<Long, Integer> flagMap = getOutsourcingVocabularyFlag(vocabularyList, analysisVocabularys);
            for (Map.Entry<Long, Integer> entry : flagMap.entrySet()) {
                checkSentenceDao.updateOutsourcingVocabularyFlag(entry.getKey(), entry.getValue());
            }
        }
        // Refresh the per-type sphinx delta index asynchronously.
        analysisSentenceService.asynchronousUpdateSphinxIndex("vocabulary" + typeStr + "_delta");
        return returnData;
    }

    /**
     * Grades the outsourced tokenization against the verified one, producing a
     * correctness flag per outsourced token (0 = wrong, 1 = correct).
     *
     * <p>Both lists tokenize the same sentence text, so tokens are aligned by their
     * character offsets: a token is compared field-by-field only when its span exactly
     * matches a verified token's span; any span mismatch is graded 0.
     *
     * @param outsourcingVocabularys outsourced tokens, in sentence order
     * @param analysisVocabularys    verified tokens, in sentence order
     * @return map of outsourced-token id to flag (0 or 1)
     */
    private Map<Long, Integer> getOutsourcingVocabularyFlag(List<AnalysisVocabulary> outsourcingVocabularys,
                                                            List<AnalysisVocabulary> analysisVocabularys) {
        int ovIndex = 0;
        int avIndex = 0;
        int oStartIndex = 0;
        int aStartIndex = 0;
        Map<Long, Integer> result = new HashMap<>();
        while (ovIndex < outsourcingVocabularys.size()) {
            // Defensive guard: if the verified tokenization is exhausted (total lengths
            // disagree), the remaining outsourced tokens cannot match — grade them 0
            // instead of throwing IndexOutOfBoundsException.
            if (avIndex >= analysisVocabularys.size()) {
                result.put(outsourcingVocabularys.get(ovIndex).getId(), 0);
                ovIndex++;
                continue;
            }
            AnalysisVocabulary ov = outsourcingVocabularys.get(ovIndex);
            AnalysisVocabulary av = analysisVocabularys.get(avIndex);
            int oEndIndex = oStartIndex + ov.getVocabulary().length();
            int aEndIndex = aStartIndex + av.getVocabulary().length();
            // Identical span: compare the remaining fields; otherwise it is wrong.
            if (oStartIndex == aStartIndex && oEndIndex == aEndIndex) {
                result.put(ov.getId(), compareVocabulary(ov, av));
            } else {
                result.put(ov.getId(), 0);
            }
            // Advance whichever side ends first; advance both on a tie.
            if (oEndIndex <= aEndIndex) {
                ovIndex++;
                oStartIndex = oEndIndex;
            }
            if (aEndIndex <= oEndIndex) {
                avIndex++;
                aStartIndex = aEndIndex;
            }
        }
        return result;
    }

    /**
     * Compares two tokens field by field.
     *
     * @param ov outsourced token
     * @param av verified token
     * @return 1 when vocabulary, characteristic, property, position1 and position2
     *         are all equal (null-safe); otherwise 0
     */
    private Integer compareVocabulary(AnalysisVocabulary ov, AnalysisVocabulary av) {
        boolean same = StringUtils.equals(ov.getVocabulary(), av.getVocabulary())
                && StringUtils.equals(ov.getCharacteristic(), av.getCharacteristic())
                && StringUtils.equals(ov.getProperty(), av.getProperty())
                && StringUtils.equals(ov.getPosition1(), av.getPosition1())
                && StringUtils.equals(ov.getPosition2(), av.getPosition2());
        return same ? 1 : 0;
    }

    /**
     * Pads every token list to the length of the longest one so the edit view can
     * render them as equal-height columns. Placeholder cells share a single blank
     * {@link AnalysisVocabulary} instance, which is safe because the view only reads it.
     *
     * @param ovocabularyLists the per-annotator token lists to equalize (mutated in place)
     */
    private void modifyOutsourcingVocabularyLists(List<List<AnalysisVocabulary>> ovocabularyLists) {
        int maxSize = 0;
        for (List<AnalysisVocabulary> ovocabularyList : ovocabularyLists) {
            maxSize = Math.max(maxSize, ovocabularyList.size());
        }
        AnalysisVocabulary empty = new AnalysisVocabulary();
        for (List<AnalysisVocabulary> ovocabularyList : ovocabularyLists) {
            while (ovocabularyList.size() < maxSize) {
                ovocabularyList.add(empty);
            }
        }
    }

}
