package com.winning.pmph.service;

import cn.hutool.core.util.StrUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.baomidou.mybatisplus.core.conditions.update.LambdaUpdateWrapper;
import com.baomidou.mybatisplus.core.toolkit.IdWorker;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.winning.pmph.entity.*;
import com.winning.pmph.enumeration.KnowledgeSystem;
import com.winning.pmph.mapper.TermBookMetadataMapper;
import com.winning.pmph.mapper.TermMapper;
import com.winning.pmph.mapper.TermStandardMapper;
import com.winning.pmph.utils.*;
import com.winning.pmph.vo.*;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang.StringUtils;
import org.jetbrains.annotations.NotNull;
import org.jsoup.Jsoup;
import org.jsoup.select.Elements;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

import javax.annotation.Resource;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.Unmarshaller;
import java.io.File;
import java.io.FileInputStream;
import java.util.*;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * @author 刘亚飞
 * @Description
 * @create 2021-03-30 15:30
 */
@Slf4j
@Component
@Transactional
public class TermService extends ServiceImpl<TermMapper, Term> {

    // Collaborating services and mappers, injected by the container (field injection via @Resource).
    @Resource
    private UncommonWordService uncommonWordService;
    @Resource
    private RuleService ruleService;
    @Resource
    private RuleItemService ruleItemService;
    @Resource
    private TermMapper termMapper;
    @Resource
    private TermBetweenService termBetweenService;
    @Resource
    private TermBookMetadataMapper termBookMetadataMapper;
    @Resource
    private TermStandardMapper termStandardMapper;
    @Resource
    private TermStandardHistoryService termStandardHistoryService;
    @Resource
    private KnowledgeSyncStatusService knowledgeSyncStatusService;

    /**
     * Imports one book archive: unzips it, locates the folder holding the four
     * required artifacts (meta.xml, front.xml, body.xml, pics), parses the
     * metadata and front matter, verifies the ISBN is not already imported,
     * parses body.xml into the term table and stages the picture folder.
     *
     * @param termBookMetadata book row to populate; filePath must point at the uploaded zip
     * @param header           passed through to the unzip utility
     *                         (presumably a request/auth header — TODO confirm with ParseBookZipUtil)
     * @throws RuntimeException if the required folder is missing, metadata is empty,
     *                          or the ISBN already exists
     */
    public void importBook(TermBookMetadata termBookMetadata, String header) {
        // Unzip the book archive
        ParseBookZipUtil.unzipBook(termBookMetadata.getFilePath(), header);
        // Recursively find the folder that contains all four of meta.xml, front.xml,
        // body.xml and pics; only the files inside that folder should be parsed
        List<String> fileList = Lists.newArrayList("meta.xml", "body.xml", "front.xml", "pics");
        String analysisPath = ParseBookZipUtil.recursiveFindFile(new File(StringUtils.removeEnd(termBookMetadata.getFilePath(), ".zip")), fileList);
        if (StringUtils.isBlank(analysisPath)) {
            throw new RuntimeException("未查找到同时包含meta.xml，body.xml，front.xml，pics的文件夹");
        }
        termBookMetadata.setFilePath(analysisPath);
        // Parse the book metadata (meta.xml) into a JSON string
        String metadata = ParseBookZipUtil.parseBookMetadata(analysisPath);
        if (StringUtils.isBlank(metadata)) {
            throw new RuntimeException("元数据信息为空");
        }
        JSONObject metadataJson = JSONObject.parseObject(metadata);
        String isbn = metadataJson.getString("ISBN");
        termBookMetadata.setIsbn(isbn);
        // Reject the import if another (different id) book with this ISBN already exists
        List<TermBookMetadata> books = termBookMetadataMapper.lambdaQuery().eq(TermBookMetadata::getIsbn, termBookMetadata.getIsbn())
                .ne(TermBookMetadata::getId, termBookMetadata.getId()).list();
        if (books.size() > 0) {
            throw new RuntimeException("图书ISBN号已存在, 请删除后重新导入!");
        }
        termBookMetadata.setMetadata(metadata);
        // Main_Title is assumed to look like "Name（Edition）"; getValue splits it — TODO confirm format
        String name = JSONObject.parseObject(metadataJson.getString("Titles")).getString("Main_Title");
        termBookMetadata.setName(getValue(name, 1));
        termBookMetadata.setVersion(getValue(name, 0));
        // Parse the front matter (front.xml) and rewrite its image references
        String frontStr = StringEscapeUtils.unescapeJava(ParseBookZipUtil.parseFrontBook(analysisPath));
        termBookMetadata.setImgPath("/ymer/action/file/showImage?filename=image/term/" + termBookMetadata.getIsbn() + "/pics/");
        frontStr = bindImgSrc(termBookMetadata, frontStr);
        termBookMetadata.setFront(frontStr);
        // Parse body.xml and insert the extracted terms
        parseBody(termBookMetadata);
        // Move the picture folder to its served location
        ParseBookZipUtil.copyPicAndTable(analysisPath, "image/term/" + isbn);
        termBookMetadata.setStatus(TermBookMetadata.STATUS_NO_SELFCHECKING);
    }

    /**
     * Splits a title of the form "Name(Edition)" (ASCII or full-width parentheses).
     *
     * @param name title string, e.g. {@code "内科学（第9版）"}
     * @param type 0 — return the text inside the first parenthesized group (the edition),
     *             any other value — return the title with all parenthesized groups removed
     * @return the extracted/stripped value, never null; "" when no group is found
     */
    private String getValue(String name, int type) {
        // NOTE: could be hoisted to a static final Pattern if this becomes hot
        Pattern pattern = Pattern.compile("\\(.*?\\)|\\)|（.*?）|）");
        if (0 == type) {
            Matcher matcher = pattern.matcher(name);
            while (matcher.find()) {
                String group = matcher.group();
                // The bare ")" / "）" alternatives match a single character; the old code
                // called substring(1, 0) on those and threw StringIndexOutOfBoundsException.
                // Skip them and keep looking for a real "(...)" group.
                if (group.length() >= 2) {
                    return group.substring(1, group.length() - 1);
                }
            }
            return "";
        } else {
            return name.replaceAll("\\(.*?\\)|\\)|（.*?）|）", "");
        }
    }


    /**
     * Pages through book-metadata rows matching the filters carried on the parameter,
     * newest updated first. The created_by column is resolved to the user's display
     * name, and groupConflict counts this book's conflicting (status = 4, deleted = 1)
     * terms via a correlated subquery.
     *
     * <p>SECURITY NOTE(review): the statement is assembled by string concatenation
     * (see queryBookMetadataCondition); filter values flow into the SQL text, so
     * they must be escaped/validated before reaching this method's helpers.
     *
     * @param termBookMetadata filter holder; also supplies currentPage/pageSize
     *                         (offset is currentPage * pageSize, i.e. currentPage is 0-based)
     * @return one page of matching rows
     */
    public List<TermBookMetadata> queryBookMetadata(TermBookMetadata termBookMetadata) {
        String sql = "select `id`, `name`, `version`, `isbn`, `category`, `status`, `failure_reason`, `metadata`, `deleted`, " +
                "(select name from user where user_name = term_book_metadata.created_by) as `created_by`, " +
                "`created_time`, `updated_time`," +
                "(select count(1) from term where term_book_metadata_id = term_book_metadata.id and deleted = 1 and `status` = 4) as groupConflict  from term_book_metadata where deleted = 0 ";
        sql = queryBookMetadataCondition(termBookMetadata, sql);
        sql += " order by updated_time desc,id limit " + termBookMetadata.getCurrentPage() * termBookMetadata.getPageSize() + ", " + termBookMetadata.getPageSize();
        List<TermBookMetadata> list = termBookMetadataMapper.executeMultiSelect(sql);
        return list;
    }


    /**
     * Counts the book-metadata rows matching the same filters as queryBookMetadata.
     * The count is taken client-side from the size of the id-only result set.
     */
    public int queryBookMetadataCount(TermBookMetadata termBookMetadata) {
        String countSql = queryBookMetadataCondition(termBookMetadata,
                "select `id` from term_book_metadata where deleted = 0 ");
        return termBookMetadataMapper.executeMultiSelect(countSql).size();
    }

    /**
     * Appends an "and ..." fragment for every non-empty filter on the parameter
     * object to the given base SQL and returns the extended statement.
     *
     * <p>SECURITY: the mapper only accepts raw SQL strings, so values must be
     * embedded directly; single quotes and backslashes in free-text filters are
     * escaped via {@link #escapeSqlValue(String)} to block SQL injection.
     *
     * @param termBookMetadata carries the optional filters (type, name, category, ...)
     * @param sql              base statement already ending in a WHERE clause
     * @return sql with all applicable condition fragments appended
     */
    private String queryBookMetadataCondition(TermBookMetadata termBookMetadata, String sql) {
        if (StringUtils.equals("between", termBookMetadata.getType())) {
            // the "between" view only lists books that reached the standard-comparison stages
            sql += " and status in (3, 7, 8, 9, 10)";
        }
        if (StringUtils.isNotBlank(termBookMetadata.getName())) {
            sql += " and name like '%" + escapeSqlValue(termBookMetadata.getName()) + "%'";
        }
        if (Objects.nonNull(termBookMetadata.getCategory())) {
            sql += " and category = '" + termBookMetadata.getCategory() + "'";
        }
        if (StringUtils.isNotBlank(termBookMetadata.getVersion())) {
            sql += " and version like '%" + escapeSqlValue(termBookMetadata.getVersion()) + "%'";
        }
        if (StringUtils.isNotBlank(termBookMetadata.getIsbn())) {
            sql += " and isbn like '%" + escapeSqlValue(termBookMetadata.getIsbn()) + "%'";
        }
        if (Objects.nonNull(termBookMetadata.getStatus())) {
            // In the "original" view, STATUS_ENTER_BETWEEN stands for every post-between status
            if (StringUtils.equals("original", termBookMetadata.getType()) && TermBookMetadata.STATUS_ENTER_BETWEEN.equals(termBookMetadata.getStatus())) {
                sql += " and status in (3, 7, 8, 9, 10)";
            } else {
                sql += " and status = " + termBookMetadata.getStatus();
            }
        }
        if (StringUtils.isNotBlank(termBookMetadata.getCreatedBy())) {
            // the filter value is the user's display name; the column stores the login name
            sql += " and created_by in (select user_name from user where name like '%" + escapeSqlValue(termBookMetadata.getCreatedBy()) + "%')";
        }
        if (StringUtils.isNotBlank(termBookMetadata.getImportDate())) {
            // importDate arrives as a JSON array: [from, to]
            JSONArray jsonArray = JSONObject.parseArray(termBookMetadata.getImportDate());
            sql += " and created_time >= '" + escapeSqlValue(String.valueOf(jsonArray.get(0))) + "' and created_time <= '" + escapeSqlValue(String.valueOf(jsonArray.get(1))) + "'";
        }
        return sql;
    }

    /** Doubles single quotes and backslashes so a value can sit inside a MySQL string literal. */
    private static String escapeSqlValue(String value) {
        return value == null ? null : value.replace("\\", "\\\\").replace("'", "''");
    }


    /**
     * Compares every intermediate term of the given book against the standard term
     * library. For each intermediate term one of three things happens:
     * (1) conflict — recorded under a shared conflict id on both sides;
     * (2) same primary name, non-conflicting synonyms — auto-merged into the
     *     existing standard term (multilingual names, synonyms, name sources and
     *     explanations are merged with source/priority rules);
     * (3) no match — promoted directly into the standard library.
     * Statuses on the book and its intermediate terms are updated accordingly.
     *
     * @param termBookMetadata the book whose intermediate terms are compared
     * @throws RuntimeException if another comparison is running or unresolved
     *                          conflicts remain from a previous comparison
     */
    public void compareToStandard(TermBookMetadata termBookMetadata) {
        // Check whether another book is currently being compared against the standard library
        List<TermBookMetadata> termBookMetadatas = termBookMetadataMapper.selectBeansByWherePart(" status = " + TermBookMetadata.STATUS_COMPARING_STANDARD + " and id != '" + termBookMetadata.getId() + "'");
        // A comparison is already in progress -> refuse to start another one
        if (termBookMetadatas.size() > 0) {
            throw new RuntimeException("系统中存在正在比对的术语,请稍后再试");
        }
        // Business rule: while any stored intermediate term still carries an unresolved
        // conflict, no new book may be compared; all conflicts must be cleared first
        List<TermBetween> termBetweenArray = termBetweenService.getBaseMapper().selectBeansByWherePart(" conflict_concept_id is not null and conflict_concept_id != ''");
        if (termBetweenArray.size() > 0) {
            throw new RuntimeException("标准术语中存在与中间术语冲突的术语,请处理完冲突后再进行对比");
        }
        // Build the JSON source descriptor of this book (attached to names/synonyms/explanations)
        JSONObject bookPropertyObject = new JSONObject();
        bookPropertyObject.put("name", termBookMetadata.getName());
        bookPropertyObject.put("version", termBookMetadata.getVersion());
        bookPropertyObject.put("isbn", termBookMetadata.getIsbn());
        bookPropertyObject.put("category", termBookMetadata.getCategory());
        bookPropertyObject.put("type", Const.IMPORT);
        termBookMetadata.setSource(bookPropertyObject);

        List<TermBetween> termBetweens = termBetweenService.getBaseMapper().selectBeansByWherePart(
                "term_book_metadata_id = '" + termBookMetadata.getId() + "' " +
                        " and deleted = 0");
        // Optimistically mark the book as entered into the standard library
        // (reverted below if any conflicts are found)
        termBookMetadata.setStatus(TermBookMetadata.STATUS_STANDARD);
        termBookMetadata.setEnterStandardBy(termBookMetadata.getUpdatedBy());
        termBookMetadata.setEnterStandardTime(termBookMetadata.getUpdatedTime());
        List<TermStandard> termStandards = termStandardMapper.selectBeansByWherePart(
                " deleted = 0");
        // Standard terms keyed by primary name
        Map<String, TermStandard> name2TermStandardMap = termStandards.stream().collect(Collectors.toMap(TermStandard::getName, item -> item));
        // Standard terms keyed by synonym
        Map<String, TermStandard> synonym2TermStandardMap = new HashMap<>();
        // Standard terms keyed by both primary name and synonyms
        Map<String, TermStandard> nameAndSynonym2TermStandardMap = new HashMap<>();
        for (TermStandard termStandard : termStandards) {
            nameAndSynonym2TermStandardMap.put(termStandard.getName(), termStandard);
            if (CollectionUtils.isNotEmpty(termStandard.getSynonym())) {
                List<String> standardSynonym = termStandard.getSynonym().stream()
                        .map(obj -> ((JSONObject) obj).getString("value"))
                        .collect(Collectors.toList());
                for (String synonymItem : standardSynonym) {
                    nameAndSynonym2TermStandardMap.put(synonymItem, termStandard);
                    synonym2TermStandardMap.put(synonymItem, termStandard);
                }
            }
        }
        // conflict id -> every term (standard and intermediate) sharing that conflict
        Map<String, Set<TermCommon>> conflictId2TermMap = new HashMap<>();
        // Intermediate terms that can go straight into the standard library
        List<TermBetween> noConflictBetweenList = Lists.newArrayList();
        for (TermBetween termBetween : termBetweens) {
            // Attach the book source to each explanation so it can enter the standard library as-is.
            // When the explanation is empty, leave it untouched — otherwise we would store
            // book info with no explanation text
            if (CollectionUtils.isNotEmpty(termBetween.getExplanation())) {
                JSONArray betweenExplainArray = new JSONArray();
                termBetween.getExplanation().forEach(explain -> {
                    // Deep-copy so the shared bookPropertyObject is not mutated
                    JSONObject jsonObject = JSON.parseObject(bookPropertyObject.toJSONString());
                    jsonObject.put("value", explain.toString());
                    betweenExplainArray.add(jsonObject);
                });
                termBetween.setExplanation(betweenExplainArray);
            }
            // Populate nameSource so the standard-term history version can be stored later
            JSONArray nameSourceArray = new JSONArray();
            nameSourceArray.add(bookPropertyObject);
            termBetween.setNameSource(nameSourceArray);

            Set<TermStandard> conflictTermSet = new HashSet<>();
            // 1. look up a standard term whose primary name equals this term's primary name
            TermStandard nameConflictTerm = name2TermStandardMap.get(termBetween.getName());
            // 2. look up a standard term having this term's primary name as a synonym
            TermStandard synonymConflictTerm = synonym2TermStandardMap.get(termBetween.getName());
            if (Objects.nonNull(nameConflictTerm)) {
                conflictTermSet.add(nameConflictTerm);
            }
            if (Objects.nonNull(synonymConflictTerm)) {
                conflictTermSet.add(synonymConflictTerm);
            }
            if (StringUtils.isNotBlank(termBetween.getSynonym())) {
                String[] splitSynonym = termBetween.getSynonym().split(";");
                for (String synonymItem : splitSynonym) {
                    // standard term found via the current synonym
                    TermStandard cntConflictTerm = nameAndSynonym2TermStandardMap.get(synonymItem);
                    if (Objects.nonNull(cntConflictTerm)) {
                        conflictTermSet.add(cntConflictTerm);
                    }
                }
            }
            // Conflict cases:
            // - primary name matched no standard primary name but matched a standard synonym
            //   (regardless of what the synonyms matched);
            // - primary name matched nothing, but a synonym matched some standard term;
            // - more than one distinct standard term was hit
            if ((Objects.isNull(nameConflictTerm) && Objects.nonNull(synonymConflictTerm))
                    || (Objects.isNull(nameConflictTerm) && Objects.isNull(synonymConflictTerm) && conflictTermSet.size() > 0)
                    || conflictTermSet.size() > 1) {
                String conflictId = IdWorker.getIdStr();
                Set<String> conflictIdSet = conflictTermSet.stream()
                        .map(TermStandard::getConflictConceptId)
                        .filter(StringUtils::isNotEmpty)
                        .collect(Collectors.toSet());
                // Collect the terms of any previously assigned conflict groups, so the
                // groups are merged under the new conflict id
                Set<TermCommon> cntConflictTermSet = conflictIdSet.stream()
                        .map(conflictId2TermMap::get)
                        .flatMap(Collection::stream).collect(Collectors.toSet());
                // Add this round's conflicting terms to the group
                cntConflictTermSet.addAll(conflictTermSet);
                cntConflictTermSet.add(termBetween);
                for (TermCommon termCommon : cntConflictTermSet) {
                    termCommon.setConflictConceptId(conflictId);
                }
                conflictId2TermMap.put(conflictId, cntConflictTermSet);
            } else if (Objects.nonNull(nameConflictTerm) && Objects.isNull(synonymConflictTerm)) {
                // Merge case: same primary name and the intermediate term's synonyms
                // do not conflict with the standard term — merge field by field
                nameConflictTerm.setXmlClassify(removeSameValue(nameConflictTerm.getXmlClassify(), termBetween.getXmlClassify()));
                nameConflictTerm.setClassify(removeSameValue(nameConflictTerm.getClassify(), termBetween.getClassify()));
                nameConflictTerm.setEnglishName(removeSameValue(nameConflictTerm.getEnglishName(), termBetween.getEnglishName()));
                nameConflictTerm.setEnglishNameSynonym(removeSameValue(nameConflictTerm.getEnglishNameSynonym(), termBetween.getEnglishNameSynonym()));
                nameConflictTerm.setEnglishAbbr(removeSameValue(nameConflictTerm.getEnglishAbbr(), termBetween.getEnglishAbbr()));
                nameConflictTerm.setEnglishAbbrSynonym(removeSameValue(nameConflictTerm.getEnglishAbbrSynonym(), termBetween.getEnglishAbbrSynonym()));
                nameConflictTerm.setFrenchName(removeSameValue(nameConflictTerm.getFrenchName(), termBetween.getFrenchName()));
                nameConflictTerm.setFrenchNameSynonym(removeSameValue(nameConflictTerm.getFrenchNameSynonym(), termBetween.getFrenchNameSynonym()));
                nameConflictTerm.setRussianName(removeSameValue(nameConflictTerm.getRussianName(), termBetween.getRussianName()));
                nameConflictTerm.setRussianNameSynonym(removeSameValue(nameConflictTerm.getRussianNameSynonym(), termBetween.getRussianNameSynonym()));
                nameConflictTerm.setGermanName(removeSameValue(nameConflictTerm.getGermanName(), termBetween.getGermanName()));
                nameConflictTerm.setGermanNameSynonym(removeSameValue(nameConflictTerm.getGermanNameSynonym(), termBetween.getGermanNameSynonym()));
                nameConflictTerm.setJapaneseName(removeSameValue(nameConflictTerm.getJapaneseName(), termBetween.getJapaneseName()));
                nameConflictTerm.setJapaneseNameSynonym(removeSameValue(nameConflictTerm.getJapaneseNameSynonym(), termBetween.getJapaneseNameSynonym()));
                nameConflictTerm.setVersion(nameConflictTerm.getVersion() + 1);
                nameConflictTerm.setUpdatedBy(termBookMetadata.getUpdatedBy());
                nameConflictTerm.setDeleted(0);
                // Convert the intermediate term's synonyms into a JSONArray and merge them
                // with the standard term's synonyms. The source is stored per synonym so
                // that, if a synonym is promoted to primary name, its origin is known
                if (StringUtils.isNotBlank(termBetween.getSynonym())) {
                    JSONArray betweenSynonymArray = new JSONArray();
                    Arrays.asList(termBetween.getSynonym().split(";")).forEach(synonym -> {
                        JSONObject jsonObject = JSON.parseObject(bookPropertyObject.toJSONString());
                        jsonObject.put("value", synonym);
                        betweenSynonymArray.add(jsonObject);
                    });
                    nameConflictTerm.getSynonym().addAll(betweenSynonymArray);

                    // If a synonym comes from several books, keep the highest-ranked book;
                    // among equally ranked books any one may be kept
                    JSONArray synonymArray = new JSONArray();
                    // Group by synonym value, then take the minimum category (highest rank)
                    nameConflictTerm.getSynonym().stream().
                            collect(Collectors.groupingBy(obj -> ((JSONObject) obj)
                                            .getString("value"),
                                    Collectors.minBy(Comparator
                                            .comparingInt(obj1 -> ((JSONObject) obj1).getInteger("category")))))
                            .values().stream().forEach(synonymArray::add);
                    nameConflictTerm.setSynonym(synonymArray);
                }

                // Primary-name source rule: keep only the highest-ranked book(s),
                // deduplicated by ISBN. First find the lowest category value present
                nameConflictTerm.getNameSource().addAll(termBetween.getNameSource());
                int minCategory = nameConflictTerm.getNameSource().stream().mapToInt(obj -> ((JSONObject) obj).getInteger("category"))
                        .min().getAsInt();
                // Keep the top-ranked sources, group by ISBN, take the first entry of each group
                JSONArray standardNameSourceArray = new JSONArray();
                nameConflictTerm.getNameSource().stream()
                        .filter(obj -> ((JSONObject) obj).getInteger("category").intValue() == minCategory)
                        .collect(Collectors.groupingBy(obj -> ((JSONObject) obj).getString("isbn")))
                        .values().forEach(list -> standardNameSourceArray.add(list.get(0)));
                nameConflictTerm.setNameSource(standardNameSourceArray);

                // Merge explanations; skip when either side has none
                if (CollectionUtils.isNotEmpty(termBetween.getExplanation()) &&
                        CollectionUtils.isNotEmpty(nameConflictTerm.getExplanation())) {
                    nameConflictTerm.getExplanation().addAll(termBetween.getExplanation());
                    // Order: explanations from the primary-name source books first, then by rank
                    JSONArray standardExplainArray = new JSONArray();
                    // ISBNs of the primary-name source books
                    List<String> nameBookId = nameConflictTerm.getNameSource().stream().map(obj -> ((JSONObject) obj)
                            .getString("isbn")).collect(Collectors.toList());
                    // Explanations sharing a primary-name source book: group by text, keep the first
                    nameConflictTerm.getExplanation().stream().filter(obj -> nameBookId
                            .contains(((JSONObject) obj).getString("isbn"))).
                            collect(Collectors.groupingBy(obj -> ((JSONObject) obj)
                                    .getString("value")))
                            .values().forEach(list -> {
                        standardExplainArray.add(list.get(0));
                    });
                    // Remaining explanations: sort by book rank and append those
                    // whose text is not already present
                    List<String> existsExplain = standardExplainArray.stream().map(obj -> ((JSONObject) obj)
                            .getString("value")).collect(Collectors.toList());
                    nameConflictTerm.getExplanation().stream().filter(obj -> !nameBookId
                            .contains(((JSONObject) obj).getString("isbn")))
                            .sorted(Comparator.comparingInt(item -> ((JSONObject) item).getInteger("category")))
                            .forEach(t -> {
                                String tmpExplain = ((JSONObject) t).getString("value");
                                if (!existsExplain.contains(tmpExplain)) {
                                    standardExplainArray.add(t);
                                }
                            });
                    nameConflictTerm.setExplanation(standardExplainArray);
                }

                // Persist the merged standard term
                termStandardMapper.saveBean(nameConflictTerm);
                // Record the intermediate term and the merged standard term in the history table
                termStandardHistoryService.saveAutoUpdateHistory(termBetween, nameConflictTerm);
                // Mark the intermediate term as entered into the standard library
                termBetweenService.lambdaUpdate().set(TermBetween::getStatus, TermBetween.STATUS_ENTER_STANDARD)
                        .set(TermBetween::getUpdatedBy, PMPHAppUtil.getCurrentUserEnName())
                        .set(TermBetween::getUpdatedTime, LocalDateTimeUtil.getNowAsString())
                        .eq(TermBetween::getId, termBetween.getId()).update();
            } else {
                // No match at all — goes straight into the standard library
                noConflictBetweenList.add(termBetween);
            }
        }
        if (conflictId2TermMap.size() > 0) {
            // Conflicts found: revert the book to "conflict" status and tag both the
            // intermediate and the standard terms with their conflict ids
            termBookMetadata.setStatus(TermBookMetadata.STATUS_CONFLICT_STANDARD);
            termBookMetadata.setEnterStandardTime(null);
            termBookMetadata.setEnterStandardBy(null);
            List<TermBetween> conflictTermBetweenList = conflictId2TermMap.values()
                    .stream().flatMap(Collection::stream)
                    .distinct().filter(item -> item instanceof TermBetween)
                    .map(item -> (TermBetween) item)
                    .collect(Collectors.toList());
            conflictTermBetweenList.forEach(item -> {
                termBetweenService.lambdaUpdate().set(TermBetween::getStatus, TermBetween.STATUS_CONFLICT)
                        .set(TermBetween::getConflictConceptId, item.getConflictConceptId())
                        .set(TermBetween::getUpdatedBy, PMPHAppUtil.getCurrentUserEnName())
                        .set(TermBetween::getUpdatedTime, LocalDateTimeUtil.getNowAsString())
                        .eq(TermBetween::getId, item.getId()).update();
            });
            List<TermStandard> conflictTermStandardList = conflictId2TermMap.values()
                    .stream().flatMap(Collection::stream)
                    .distinct().filter(item -> item instanceof TermStandard)
                    .map(item -> (TermStandard) item)
                    .collect(Collectors.toList());
            conflictTermStandardList.forEach(item -> {
                termStandardMapper.saveBean(item);
            });
        }
        // Convert the conflict-free intermediate terms to standard terms, save them,
        // and update the intermediate terms' statuses
        betweenToStandard(noConflictBetweenList, termBookMetadata);
    }

    /**
     * Promotes the given conflict-free intermediate terms into the standard term
     * library (with a history record each), then flags them as entered.
     *
     * @param termBetweenList  intermediate terms to promote; no-op when empty
     * @param termBookMetadata source book, supplies the auditing user
     */
    private void betweenToStandard(List<TermBetween> termBetweenList, TermBookMetadata termBookMetadata) {
        if (CollectionUtils.isEmpty(termBetweenList)) {
            return;
        }
        for (TermBetween between : termBetweenList) {
            // Normalize the intermediate term's synonyms to a JSONArray, then map it onto a standard term
            TermStandard standard = JSONObject.parseObject(
                    termStandardHistoryService.formatBetweenSynonym(between), TermStandard.class);
            standard.setId(null);
            standard.setVersion(1);
            standard.setCreatedBy(termBookMetadata.getUpdatedBy());
            standard.setUpdatedBy(termBookMetadata.getUpdatedBy());
            termStandardMapper.saveBean(standard);
            // Write the corresponding history record
            TermStandardHistory history = JSON.parseObject(JSON.toJSONString(standard),
                    TermStandardHistory.class);
            history.setStandardId(standard.getId());
            history.setType(Const.AUTOMATIC_CREATE);
            history.setId(null);
            termStandardHistoryService.save(history);
        }
        // Flip the intermediate terms' status to "entered standard library"
        updTermBetweenStatusByIds(termBookMetadata);
    }

    /**
     * Marks every non-conflicting intermediate term of the given book as entered
     * into the standard library, stamping the auditing columns, in one UPDATE.
     * The SQL is built by concatenation; the interpolated values (book id, current
     * user name, timestamp) come from inside the system, not directly from users.
     */
    private void updTermBetweenStatusByIds(TermBookMetadata termBookMetadata) {
        // Bulk status update on the intermediate-term table
        String betweenStatusSql = "update term_between set status = " + TermBetween.STATUS_ENTER_STANDARD + ", updated_by = '" + PMPHAppUtil.getCurrentUserEnName() + "', updated_time = '" + LocalDateTimeUtil.getNowAsString() + "'" +
                " where (conflict_concept_id is null or conflict_concept_id = '') and term_book_metadata_id = '" + termBookMetadata.getId() + "'";
        termBetweenService.getBaseMapper().executeUpdate(betweenStatusSql);
    }

    /**
     * Merges two ';'-separated value lists, dropping duplicates and blank entries
     * while preserving first-occurrence order.
     *
     * @param standardValue existing list (may be null or empty)
     * @param betweenValue  incoming list (may be null or empty)
     * @return merged ';'-joined list, "" when both inputs are empty
     */
    private String removeSameValue(String standardValue, String betweenValue) {
        // Stdlib-only rewrite of the old commons-lang3 join (null behaves as ""),
        // removing the file's only lang3 usage alongside commons-lang 2.
        String merged = (standardValue == null ? "" : standardValue)
                + ";"
                + (betweenValue == null ? "" : betweenValue);
        return Arrays.stream(merged.trim().split(";"))
                .distinct()
                .filter(part -> !part.trim().isEmpty())
                .collect(Collectors.joining(";"));
    }


    /**
     * Pages through non-deleted standard terms, optionally fuzzy-matching the
     * search keyword against the primary name, synonyms and every
     * foreign-language name/synonym field, newest updated first.
     *
     * @param termStandard carries the optional search keyword in its name field
     * @param currentPage  1-based page number (MyBatis-Plus Page)
     * @param pageSize     rows per page
     * @return the requested page
     */
    @SuppressWarnings("unchecked")
    public Page<TermStandard> queryStandardList(TermStandard termStandard, int currentPage, int pageSize) {
        String keyword = termStandard.getName();
        return termStandardMapper.selectPage(new Page<>(currentPage, pageSize),
                Wrappers.lambdaQuery(TermStandard.class)
                        .eq(TermStandard::getDeleted, 0)
                        // keyword filter is only applied when a keyword was supplied
                        .and(StringUtils.isNotBlank(keyword),
                                wrapper -> wrapper.like(TermStandard::getName, keyword)
                                        .or().like(TermStandard::getSynonym, keyword)
                                        .or().like(TermStandard::getEnglishName, keyword)
                                        .or().like(TermStandard::getEnglishNameSynonym, keyword)
                                        .or().like(TermStandard::getEnglishAbbr, keyword)
                                        .or().like(TermStandard::getEnglishAbbrSynonym, keyword)
                                        .or().like(TermStandard::getFrenchName, keyword)
                                        .or().like(TermStandard::getFrenchNameSynonym, keyword)
                                        .or().like(TermStandard::getRussianName, keyword)
                                        .or().like(TermStandard::getRussianNameSynonym, keyword)
                                        .or().like(TermStandard::getGermanName, keyword)
                                        .or().like(TermStandard::getGermanNameSynonym, keyword)
                                        .or().like(TermStandard::getJapaneseName, keyword)
                                        .or().like(TermStandard::getJapaneseNameSynonym, keyword))
                        .orderByDesc(TermStandard::getUpdatedTime).orderByAsc(TermStandard::getId));
    }

    /**
     * Builds an intermediate term (TermBetween) from one group of parsed Term
     * rows: the row with type == 0 is the primary name, all other rows are its
     * synonyms whose language fields are ';'-joined into the *Synonym columns.
     *
     * @param itemTermList     terms of one concept; must contain a type-0 entry
     * @param termBookMetadata source book, supplies id and auditing user
     * @return populated, unsaved TermBetween
     * @throws RuntimeException if no primary (type == 0) term is present
     */
    private TermBetween saveTermBetween(List<Term> itemTermList, TermBookMetadata termBookMetadata) {
        // was get(0) on a filtered list — threw a bare IndexOutOfBoundsException on malformed data
        Term nameTerm = itemTermList.stream().filter(t -> 0 == t.getType()).findFirst()
                .orElseThrow(() -> new RuntimeException("术语数据缺少正名(type=0)记录"));
        List<Term> synonymTerms = itemTermList.stream().filter(t -> 0 != t.getType()).collect(Collectors.toList());
        TermBetween termBetween = new TermBetween();
        termBetween.setName(nameTerm.getName());
        // the Chinese synonym column keeps every name unfiltered (original behavior)
        termBetween.setSynonym(StringUtils.join(synonymTerms.stream().map(Term::getName).collect(Collectors.toList()), ";"));
        termBetween.setEnglishAbbr(nameTerm.getEnglishAbbr());
        termBetween.setEnglishAbbrSynonym(joinNonBlank(synonymTerms, Term::getEnglishAbbr));
        termBetween.setEnglishName(nameTerm.getEnglishName());
        termBetween.setEnglishNameSynonym(joinNonBlank(synonymTerms, Term::getEnglishName));
        termBetween.setFrenchName(nameTerm.getFrenchName());
        termBetween.setFrenchNameSynonym(joinNonBlank(synonymTerms, Term::getFrenchName));
        termBetween.setRussianName(nameTerm.getRussianName());
        termBetween.setRussianNameSynonym(joinNonBlank(synonymTerms, Term::getRussianName));
        termBetween.setGermanName(nameTerm.getGermanName());
        termBetween.setGermanNameSynonym(joinNonBlank(synonymTerms, Term::getGermanName));
        termBetween.setJapaneseName(nameTerm.getJapaneseName());
        termBetween.setJapaneseNameSynonym(joinNonBlank(synonymTerms, Term::getJapaneseName));
        termBetween.setDeleted(0);
        termBetween.setStatus(TermBetween.STATUS_NO_ENTER);
        termBetween.setTermBookMetadataId(termBookMetadata.getId());
        termBetween.setXmlClassify(nameTerm.getXmlClassify());
        termBetween.setClassify(nameTerm.getClassify());
        termBetween.setExplanation(nameTerm.getExplanation());
        termBetween.setCreatedBy(termBookMetadata.getUpdatedBy());
        termBetween.setUpdatedBy(termBookMetadata.getUpdatedBy());
        return termBetween;
    }

    /** Joins the non-blank values extracted by {@code getter} with ';' ("" when none). */
    private static String joinNonBlank(List<Term> terms, Function<Term, String> getter) {
        return terms.stream().map(getter).filter(StringUtils::isNotBlank).collect(Collectors.joining(";"));
    }

    /**
     * Parses body.xml of an imported book with JAXB and batch-inserts the terms it
     * contains, replacing rare-character placeholders with image references first.
     *
     * @param termBookMetadata metadata of the book; its filePath must point at the
     *                         folder containing body.xml
     * @throws RuntimeException if body.xml cannot be read or unmarshalled
     */
    private void parseBody(TermBookMetadata termBookMetadata) {
        TermXmlBody termXmlBody;
        File file = new File(termBookMetadata.getFilePath() + "/body.xml");
        // try-with-resources: the original leaked the FileInputStream on every call.
        // NOTE(review): JAXB parses externally supplied XML here; if the archive is
        // untrusted, disable DTD/external entities (XXE) on the parser — confirm.
        try (FileInputStream stream = new FileInputStream(file)) {
            JAXBContext jaxbContext = JAXBContext.newInstance(TermXmlBody.class);
            Unmarshaller unmarshaller = jaxbContext.createUnmarshaller();
            termXmlBody = (TermXmlBody) unmarshaller.unmarshal(stream);
        } catch (Exception e) {
            // pass the Throwable itself (no '{}' placeholder) so slf4j logs the stack
            // trace, and keep it as the cause of the rethrown exception
            log.error("解析body.xml时出错", e);
            throw new RuntimeException("解析body.xml时出错:" + e.toString(), e);
        }

        // Rare-character code -> replacement entry; first entry wins on duplicate codes
        List<UncommonWord> uncommonWordList = uncommonWordService.list();
        Map<String, UncommonWord> uncommonWordMap = uncommonWordList.stream()
                .filter(item -> StringUtils.isNotEmpty(item.getCode()))
                .collect(Collectors.toMap(item -> item.getCode(), item -> item, (a, b) -> a));
        // Rule 18 maps a term-type name to its numeric index
        List<RuleItem> ruleItemList = ruleService.queryItemByRuleId("18");
        Map<String, Integer> termTypeMap = Maps.newConcurrentMap();
        ruleItemList.forEach(g -> termTypeMap.put(g.getName(), g.getIdx()));
        List<TermXml> termXmlList = termXmlBody.getTermList();
        List<Term> termList = Lists.newArrayList();
        for (TermXml termXml : termXmlList) {
            // Replace rare-character placeholders in the XML
            handleImg(termXml, uncommonWordMap, termBookMetadata);
            String sfId = IdWorker.getIdStr();
            // Parse the primary name, synonyms and their attributes into Term rows
            parseTerm(termXml, termList, sfId, termTypeMap, termBookMetadata);
        }
        // Batch insert the parsed terms, 100 per batch
        saveBatch(termList, 100);
    }

    /**
     * Collects the distinct values of one field across a group of terms (or
     * their synonym rows) and joins them with {@code ';'}.
     *
     * @param type selects the field: 1-8 read from {@code list} (pinyin,
     *             English, English abbreviation, Russian, German, French,
     *             Japanese, classify); 9-14 read from {@code termXmlSynonymList}
     * @return the joined distinct values, or {@code null} when nothing non-null
     *         was found (or the type is unknown)
     */
    private String mergeValue(List<TermXml> list, int type, List<TermXmlSynonym> termXmlSynonymList) {
        Function<TermXml, String> termField = null;
        Function<TermXmlSynonym, String> synonymField = null;
        switch (type) {
            case 1: termField = TermXml::getTermPY; break;
            case 2: termField = TermXml::getTermEng; break;
            case 3: termField = TermXml::getTermEngAbb; break;
            case 4: termField = TermXml::getTermRussian; break;
            case 5: termField = TermXml::getTermGerman; break;
            case 6: termField = TermXml::getTermFrench; break;
            case 7: termField = TermXml::getTermJapanese; break;
            case 8: termField = TermXml::getClassify; break;
            case 9: synonymField = TermXmlSynonym::getEng; break;
            case 10: synonymField = TermXmlSynonym::getRussian; break;
            case 11: synonymField = TermXmlSynonym::getGerman; break;
            case 12: synonymField = TermXmlSynonym::getFrench; break;
            case 13: synonymField = TermXmlSynonym::getJapanese; break;
            // NOTE(review): 14 reads German again, exactly like 11 — looks like a
            // copy/paste slip (no French/abbr counterpart); confirm intended mapping
            case 14: synonymField = TermXmlSynonym::getGerman; break;
            default: break;
        }
        Set<String> merged = new HashSet<>();
        if (termField != null) {
            for (TermXml item : list) {
                merged.add(termField.apply(item));
            }
        } else if (synonymField != null) {
            for (TermXmlSynonym item : termXmlSynonymList) {
                merged.add(synonymField.apply(item));
            }
        }
        merged.remove(null);
        if (merged.isEmpty()) {
            return null;
        }
        return StringUtils.join(merged, ";");
    }

    /**
     * Builds {@link Term} entities for one parsed XML term element and appends
     * them to {@code termList}: the preferred term (type 0) first, then one
     * entity per synonym/attribute row.
     *
     * <p>Side effect: when a synonym's class attribute is not yet known in
     * rule 18, a new {@code RuleItem} row is inserted immediately and cached
     * into {@code termTypeMap}.
     *
     * @param termXml          one parsed term element from body.xml
     * @param termList         accumulator for later batch insertion
     * @param sfId             generated group/concept id shared by the term and its synonyms
     * @param termTypeMap      class-attribute name -> numeric type (rule 18)
     * @param termBookMetadata owning book, used for bookkeeping fields
     */
    private void parseTerm(TermXml termXml, List<Term> termList, String sfId, Map<String, Integer> termTypeMap, TermBookMetadata termBookMetadata) {
        // preferred term (type 0)
        Term term = new Term();
        term.setName(termXml.getTermName());
        term.setCode(termXml.getTermCode());
        term.setPinyin(termXml.getTermPY());
        term.setType(0);
        term.setEnglishName(termXml.getTermEng());
        term.setEnglishAbbr(termXml.getTermEngAbb());
        term.setFrenchName(termXml.getTermFrench());
        term.setRussianName(termXml.getTermRussian());
        term.setGermanName(termXml.getTermGerman());
        term.setJapaneseName(termXml.getTermJapanese());
        addTerm(term, termBookMetadata, sfId, termXml);
        // parse the definitions and store them as a JSON array on the preferred term
        List<TermXmlDef> defList = termXml.getTermXmlDefList();
        if (CollectionUtils.isNotEmpty(defList)) {
            term.setExplanation(JSONArray.parseArray(JSON.toJSONString(defList.stream().map(t -> t.getTermDef()).collect(Collectors.toList()))));
        }
        termList.add(term);
        // synonym/attribute rows
        List<TermXmlSynonym> synonymList = termXml.getTermXmlSynonymList();
        if (CollectionUtils.isNotEmpty(synonymList)) {
            for (TermXmlSynonym synonym : synonymList) {
                Term synonymTerm = new Term();
                String className = synonym.getClassAttr();
                Integer type = termTypeMap.get(className);
                if (Objects.isNull(type)) {
                    // unknown class attribute: register it as a new rule_item under rule 18
                    RuleItem ruleItem = new RuleItem();
                    ruleItem.setName(className);
                    ruleItem.setRuleId("18");
                    // NOTE(review): idx = current item count assumes idx values are
                    // contiguous and nothing is deleted — confirm, else idx may collide
                    List<RuleItem> valueList = ruleService.queryItemByRuleId("18");
                    ruleItem.setIdx(valueList.size());
                    ruleItemService.save(ruleItem);
                    type = ruleItem.getIdx();
                    termTypeMap.put(className, type);
                }
                synonymTerm.setType(type);
                synonymTerm.setName(synonym.getTermSynonym());
                synonymTerm.setEnglishName(synonym.getEng());
                synonymTerm.setRussianName(synonym.getRussian());
                synonymTerm.setGermanName(synonym.getGerman());
                synonymTerm.setFrenchName(synonym.getFrench());
                synonymTerm.setJapaneseName(synonym.getJapanese());
                addTerm(synonymTerm, termBookMetadata, sfId, termXml);
                termList.add(synonymTerm);
            }
        }
    }

    /**
     * Normalizes every text field of one parsed term: HTML-unescapes the raw
     * value, then runs it through {@link #handleValue} to replace
     * uncommon-character markers and rewrite image paths. Synonym and
     * definition values are processed the same way.
     */
    private void handleImg(TermXml termXml, Map<String, UncommonWord> uncommonWordMap, TermBookMetadata termBookMetadata) {
        // the name field is validated as "name" (must be non-blank, no semicolons)
        String name = StringEscapeUtils.unescapeHtml(termXml.getTermName());
        termXml.setTermName(handleValue(name, uncommonWordMap, "name", termBookMetadata));
        String pinyin = StringEscapeUtils.unescapeHtml(termXml.getTermPY());
        termXml.setTermPY(handleValue(pinyin, uncommonWordMap, "", termBookMetadata));
        String english = StringEscapeUtils.unescapeHtml(termXml.getTermEng());
        termXml.setTermEng(handleValue(english, uncommonWordMap, "", termBookMetadata));
        String englishAbbr = StringEscapeUtils.unescapeHtml(termXml.getTermEngAbb());
        termXml.setTermEngAbb(handleValue(englishAbbr, uncommonWordMap, "", termBookMetadata));
        String russian = StringEscapeUtils.unescapeHtml(termXml.getTermRussian());
        termXml.setTermRussian(handleValue(russian, uncommonWordMap, "", termBookMetadata));
        String german = StringEscapeUtils.unescapeHtml(termXml.getTermGerman());
        termXml.setTermGerman(handleValue(german, uncommonWordMap, "", termBookMetadata));
        String french = StringEscapeUtils.unescapeHtml(termXml.getTermFrench());
        termXml.setTermFrench(handleValue(french, uncommonWordMap, "", termBookMetadata));
        String japanese = StringEscapeUtils.unescapeHtml(termXml.getTermJapanese());
        termXml.setTermJapanese(handleValue(japanese, uncommonWordMap, "", termBookMetadata));

        // synonyms are validated as "synonym" (must be non-blank, no semicolons)
        List<TermXmlSynonym> synonymList = termXml.getTermXmlSynonymList();
        if (CollectionUtils.isNotEmpty(synonymList)) {
            synonymList.forEach(termXmlSynonym -> {
                String synonym = StringEscapeUtils.unescapeHtml(termXmlSynonym.getTermSynonym());
                termXmlSynonym.setTermSynonym(handleValue(synonym, uncommonWordMap, "synonym", termBookMetadata));
            });
        }
        // definitions may be blank and carry embedded images
        List<TermXmlDef> defList = termXml.getTermXmlDefList();
        if (CollectionUtils.isNotEmpty(defList)) {
            defList.forEach(termXmlDef -> {
                String def = StringEscapeUtils.unescapeHtml(termXmlDef.getTermDef());
                termXmlDef.setTermDef(handleValue(def, uncommonWordMap, "def", termBookMetadata));
            });
        }
    }

    /**
     * Validates and rewrites one HTML field value.
     *
     * <p>For {@code type} "name"/"synonym" the value must be non-blank and must
     * not contain a semicolon (semicolons are used as join separators
     * elsewhere). Then, in the parsed HTML: every {@code <bz id="...">} marker
     * is turned into an {@code <img class="rw_img bz">} pointing at the
     * registered uncommon-character image, and every other {@code <img>} gets
     * its src rewritten onto the book's published image path after verifying
     * the file exists under the book's file path.
     *
     * @param value            raw (already HTML-unescaped) field value
     * @param uncommonWordMap  uncommon words indexed by code ({@code bz} id)
     * @param type             "name", "synonym", "def" or "" — controls validation only
     * @param termBookMetadata supplies filePath (validation) and imgPath (rewrite)
     * @return the rewritten HTML with all line breaks removed, or {@code null}
     *         for a blank value
     * @throws RuntimeException on validation failure, unknown bz code, or a
     *                          missing/blank image path
     */
    private String handleValue(String value, Map<String, UncommonWord> uncommonWordMap,
                               String type, TermBookMetadata termBookMetadata) {
        if (StringUtils.equals(type, "name") || StringUtils.equals(type, "synonym")) {
            if (StringUtils.isBlank(value)) {
                throw new RuntimeException("termName或termSynonym节点值为空");
            }
            if (value.contains(";")) {
                throw new RuntimeException("正名或同义词包含分号:" + value);
            }
        }
        if (StringUtils.isBlank(value)) {
            return null;
        }
        org.jsoup.nodes.Document doc = Jsoup.parse(value);
        Elements bzElements = doc.body().getElementsByTag("bz");
        bzElements.forEach(bz -> {
            String id = bz.attr("id");
            UncommonWord uncommonWord = uncommonWordMap.get(id);
            if (Objects.isNull(uncommonWord)) {
                throw new RuntimeException("未知的生僻字编码:" + id);
            }
            // mutate the element in place: <bz id=..> becomes a styled <img>
            bz.tagName("img").attr("data-similar-word", uncommonWord.getSimilarWord())
                    .attr("data-bz-id", uncommonWord.getId()).attr("title", uncommonWord.getSpellCode())
                    .attr("class", "rw_img bz").attr("src", uncommonWord.getPath()).removeAttr("id");
        });
        // rewrite plain images (skip the bz images converted above)
        doc.body().getElementsByTag("img").forEach(img -> {
            if ("rw_img bz".equals(img.attr("class"))) {
                return;
            }
            // some source files carry the path in src, others in id
            String src = StringUtils.defaultIfBlank(img.attr("src"), img.attr("id"));
            if (StringUtils.isBlank(src)) {
                throw new RuntimeException("图片路径为空");
            }
            if (!new File(termBookMetadata.getFilePath() + "/" + src).exists()) {
                throw new RuntimeException("找不到对应的图片,图片路径是:" + src);
            }
            // keep only the part after the first '/' (strips the local "pics/" prefix)
            img.attr("src", termBookMetadata.getImgPath() +
                    StringUtils.substringAfter(src, "/"));
        });
        return StrUtil.removeAllLineBreaks(doc.body().html());
    }

    /**
     * Populates the bookkeeping fields shared by every term parsed from one
     * XML element: owning book, concept/group ids, audit users and the
     * initial status/deleted flags.
     */
    private void addTerm(Term term, TermBookMetadata termBookMetadata, String sfId, TermXml termXml) {
        // both the working and the original concept id start as the generated group id
        term.setConceptId(sfId);
        term.setOriginalConceptId(sfId);
        term.setTermBookMetadataId(termBookMetadata.getId());
        term.setXmlClassify(termXml.getClassify());
        // the importing user is recorded as both creator and updater
        String operator = termBookMetadata.getUpdatedBy();
        term.setCreatedBy(operator);
        term.setUpdatedBy(operator);
        // freshly parsed terms await comparison and are not soft-deleted
        term.setStatus(Term.STATUS_NO_COMPARE);
        term.setDeleted(Term.DELETED_NO);
    }

    /**
     * Rewrites image references in front-matter HTML so they point at the
     * book's published image path, validating that each referenced picture
     * exists under {@code {filePath}/pics}.
     *
     * <p>{@code <Table>} elements are treated as images. Elements with
     * {@code class="BZ"} are uncommon-character glyphs whose path may be in
     * {@code src} (with extension) or {@code id} (without); all other images
     * carry their name in {@code id} without extension.
     *
     * @param value raw front-matter HTML
     * @return the HTML with every img src rebound to the published path
     * @throws RuntimeException if the pics directory is unreadable, a path is
     *                          blank, or a referenced picture is missing
     */
    private String bindImgSrc(TermBookMetadata termBookMetadata, String value) {
        File picsDir = new File(termBookMetadata.getFilePath() + "/pics");
        // listFiles() returns null when the directory is missing or unreadable;
        // the original dereferenced it unchecked and could NPE
        File[] picFiles = picsDir.listFiles();
        if (picFiles == null) {
            throw new RuntimeException("图片目录不存在:" + picsDir.getPath());
        }
        // collect the available picture names (everything that is not an .xml file)
        List<String> imgList = Lists.newArrayList();
        for (File listFile : picFiles) {
            if (!listFile.getName().endsWith(".xml")) {
                imgList.add(listFile.getName());
            }
        }
        // rewrite image paths
        org.jsoup.nodes.Document doc = Jsoup.parse(value.replaceAll("<Table", "<IMG"));
        Elements imgElements = doc.getElementsByTag("img");
        imgElements.forEach(ele -> {
            String picName;
            if ("BZ".equals(ele.attr("class"))) {
                // paths are inconsistent across books: src="name.jpg" or src="pics/name.jpg";
                // src carries the extension, id does not
                String src = ele.attr("src");
                // bug fix: the blank check used to run AFTER appending ".jpg",
                // so it could never fire — validate the raw attributes first
                if (StringUtils.isBlank(src) && StringUtils.isBlank(ele.attr("id"))) {
                    throw new RuntimeException("文前内容引用生僻字路径为空");
                }
                picName = StringUtils.isBlank(src) ? (ele.attr("id") + ".jpg") : src;
                picName = picName.substring(picName.lastIndexOf("/") + 1);
                if (!imgList.contains(picName)) {
                    throw new RuntimeException("文前内容引用生僻字:" + picName + "不存在");
                }
            } else {
                picName = ele.attr("id");
                if (StringUtils.isBlank(picName)) {
                    throw new RuntimeException("文前内容引用图片路径为空");
                }
                picName = picName.substring(picName.lastIndexOf("/") + 1) + ".jpg";
                if (!imgList.contains(picName)) {
                    throw new RuntimeException("文前内容引用图片:" + picName + "不存在");
                }
            }
            ele.attr("src", termBookMetadata.getImgPath() + picName);
        });
        return doc.body().html();
    }


    /**
     * Pages through a book's terms and returns them grouped for the UI,
     * attached to the book's metadata object.
     *
     * <p>For {@code queryType == "conflict"} the page is grouped first by
     * conflict group, then by concept group inside it; each concept group is
     * represented by its preferred term (type 0) carrying its synonyms in
     * {@code termList}, the group size in {@code sameCount} and a running
     * conflict number in {@code sortNo}. Otherwise terms are grouped by
     * original concept id and only preferred terms are returned.
     *
     * @param term query object: book id, queryType, paging and search fields
     * @return the book metadata with {@code termList} and {@code totalNum} set
     */
    public TermBookMetadata queryTermList(Term term) {
        TermBookMetadata termBookMetadata = termBookMetadataMapper.selectById(term.getTermBookMetadataId());
        List<Term> termList = queryTermListByBookIdAndPage(term);
        // group by concept id or by conflict concept id
        Map<String, List<Term>> map = null;
        if ("conflict".equals(term.getQueryType())) {
            // group by conflict id
            List<Term> conflictList = Lists.newArrayList();
            map = groupByConceptId(termList, 1);
            int sortNo = 0;
            for (Map.Entry<String, List<Term>> stringListEntry : map.entrySet()) {
                sortNo++;
                // sub-group by concept id inside the conflict group
                Map<String, List<Term>> conceptMap = groupByConceptId(stringListEntry.getValue(), 0);
                // preferred terms: key = concept id, value = preferred-term entity
                List<Term> nameList = Lists.newArrayList();
                for (List<Term> itemConceptList : conceptMap.values()) {
                    // assumes every concept group contains a type-0 preferred term;
                    // get(0) throws otherwise — TODO confirm the data guarantees this
                    Term nameTerm = itemConceptList.stream().filter(t -> (0 == t.getType())).collect(Collectors.toList()).get(0);
                    nameTerm.setSameCount(conceptMap.size());
                    nameTerm.setTermList(itemConceptList.stream().filter(t -> (0 != t.getType())).collect(Collectors.toList()));
                    nameTerm.setSortNo(sortNo);
                    nameList.add(nameTerm);
                    nameTerm.setExplainList(nameTerm.getExplanation());
                }
                conflictList.addAll(nameList);
            }
            termBookMetadata.setTermList(conflictList);
        } else {
            map = groupByConceptId(termList, 2);
            // preferred terms only, each carrying its synonyms
            List<Term> nameList = Lists.newArrayList();
            for (List<Term> itemList : map.values()) {
                Term nameTerm = itemList.stream().filter(t -> (0 == t.getType())).collect(Collectors.toList()).get(0);
                nameTerm.setTermList(itemList.stream().filter(t -> (0 != t.getType())).collect(Collectors.toList()));
                nameList.add(nameTerm);
            }
            termBookMetadata.setTermList(nameList);
        }
        // total group count was computed by the paging query as a side effect
        termBookMetadata.setTotalNum(term.getTotalNum());
        return termBookMetadata;
    }

    /**
     * Queries a page of terms for one book.
     *
     * <p>Pipeline: (1) load all matching rows for the book (optionally
     * restricted to conflict status and filtered by a keyword across all name
     * columns); (2) if a push platform + sync status is given, keep only rows
     * whose original concept id matches the platform's sync records;
     * (3) collect the distinct group ids (conflict ids for conflict view,
     * original ids otherwise), record the total group count into
     * {@code term.totalNum} as a side effect, slice out the requested page of
     * ids and reload those groups with their type names and book source.
     *
     * @param term query object (book id, queryType, keyword, paging, platform/sync filters)
     * @return the terms of the requested page, or an empty list
     */
    private List<Term> queryTermListByBookIdAndPage(Term term) {
        List<Term> conceptList = termMapper.lambdaQuery().eq(Term::getTermBookMetadataId, term.getTermBookMetadataId())
                .eq("conflict".equals(term.getQueryType()), Term::getStatus, 1)
                .and(StringUtils.isNotBlank(term.getName()),
                        wrapper -> wrapper.like(Term::getName, term.getName())
                                .or().like(Term::getEnglishName, term.getName())
                                .or().like(Term::getEnglishAbbr, term.getName())
                                .or().like(Term::getFrenchName, term.getName())
                                .or().like(Term::getRussianName, term.getName())
                                .or().like(Term::getGermanName, term.getName())
                                .or().like(Term::getJapaneseName, term.getName()))
                .like(StringUtils.isNotBlank(term.getXmlClassify()), Term::getXmlClassify, term.getXmlClassify()).list();
        // a selected platform means this is the push page; optionally filter by sync status
        if (StringUtils.isNotBlank(term.getSelectedPlatform()) && Objects.nonNull(term.getSyncStatus())) {
            List<KnowledgeSyncStatus> syncStatusList = null;
            // "waiting" = not pushed yet: exclude everything that already has a sync record
            if (KnowledgeSyncStatus.FAILED_KIND_WAITING.equals(term.getSyncStatus())) {
                syncStatusList = knowledgeSyncStatusService.lambdaQuery().eq(KnowledgeSyncStatus::getKnowledgeLibId, KnowledgeLib.TERM_LIB_ID)
                        .eq(KnowledgeSyncStatus::getPlatformId, term.getSelectedPlatform())
                        .in(KnowledgeSyncStatus::getKind, KnowledgeSyncStatus.FAILED_KIND_CONSUMER,
                                KnowledgeSyncStatus.FAILED_KIND_PRODUCER, KnowledgeSyncStatus.KIND_SUCCESS).list();
                if (CollectionUtils.isNotEmpty(syncStatusList)) {
                    List<String> tmpKnowledgeIdList = syncStatusList.stream().map(KnowledgeSyncStatus::getKnowledgeId)
                            .collect(Collectors.toList());
                    conceptList = conceptList.stream().filter(t -> !tmpKnowledgeIdList.contains(t.getOriginalConceptId()))
                            .collect(Collectors.toList());
                }
            } else {
                // success / producer-failed / consumer-failed: keep only matching records
                syncStatusList = knowledgeSyncStatusService.lambdaQuery().eq(KnowledgeSyncStatus::getKnowledgeLibId, KnowledgeLib.TERM_LIB_ID)
                        .eq(KnowledgeSyncStatus::getPlatformId, term.getSelectedPlatform())
                        .eq(KnowledgeSyncStatus::getKind, term.getSyncStatus()).list();
                if (CollectionUtils.isNotEmpty(syncStatusList)) {
                    List<String> tmpKnowledgeIdList = syncStatusList.stream().map(KnowledgeSyncStatus::getKnowledgeId)
                            .collect(Collectors.toList());
                    conceptList = conceptList.stream().filter(t -> tmpKnowledgeIdList.contains(t.getOriginalConceptId()))
                            .collect(Collectors.toList());
                } else {
                    conceptList = Lists.newArrayList();
                }
            }
        }

        if (CollectionUtils.isNotEmpty(conceptList)) {
            // distinct group ids in first-seen order
            Set<String> set = Sets.newLinkedHashSet();
            if ("conflict".equals(term.getQueryType())) {
                for (Term item : conceptList) {
                    set.add(item.getConflictConceptId());
                }
            } else {
                for (Term item : conceptList) {
                    set.add(item.getOriginalConceptId());
                }
            }
            // side effect: total number of groups, read back by queryTermList
            term.setTotalNum(set.size());
            // slice the requested page of ids; joined with ',' plus quotes for the IN clause
            String conceptIds = "";
            if ((term.getCurrentPage() * term.getPageSize() + term.getPageSize()) > set.size()) {
                conceptIds = StringUtils.join(new ArrayList<>(set).subList(term.getCurrentPage() * term.getPageSize(), set.size()), "','");
            } else {
                conceptIds = StringUtils.join(new ArrayList<>(set).subList(term.getCurrentPage() * term.getPageSize(), (term.getCurrentPage() * term.getPageSize()) + term.getPageSize()), "','");
            }
            if ("conflict".equals(term.getQueryType())) {
                return queryTermByConceptIds(conceptIds, 1);
            } else {
                return queryTermByConceptIds(conceptIds, 0);
            }
        }
        return Lists.newArrayList();
    }

    /**
     * Loads full term rows for a set of group ids, joining in the rule-item
     * type name and a human-readable book source string
     * ("《name》-version-isbn", empty parts dropped).
     *
     * <p>NOTE(review): {@code conceptIds} is spliced into the SQL verbatim.
     * The ids come from IdWorker-generated values collected upstream, so the
     * practical injection risk looks low, but a parameterized query would be
     * safer — confirm the mapper supports it.
     *
     * @param conceptIds ids pre-joined with {@code "','"} for the IN clause
     * @param type       0 = match original_concept_id, 1 = match conflict_concept_id
     * @return matching terms ordered by id
     */
    private List<Term> queryTermByConceptIds(String conceptIds, int type) {
        String sql = "select term.*, rule_item.name as typeName, " +
                " concat_ws(';', concat('《', term_book_metadata.name, '》'), " +
                " term_book_metadata.version, term_book_metadata.isbn) as nameSource " +
                " from term left join rule_item on term.type = rule_item.idx and rule_item.rule_id = '18' " +
                " left join term_book_metadata on term.term_book_metadata_id = term_book_metadata.id  where 1 = 1 ";
        if (0 == type) {
            sql += " and term.`original_concept_id` in ('" + conceptIds + "') order by term.id";
        } else {
            sql += " and term.`conflict_concept_id` in ('" + conceptIds + "') order by term.id";
        }
        List<Term> list = termMapper.executeMultiSelect(sql);
        list.forEach(t -> {
            // concat_ws joins with ';'; re-join the non-blank parts with '-' for display
            t.setNameSource(Stream.of(t.getNameSource().split(";")).filter(StringUtils::isNotBlank)
                    .collect(Collectors.joining("-")));
        });
        return list;
    }


    /**
     * Loads one concept group by its original concept id: the preferred term
     * (type 0) with its synonym/attribute rows attached, enriched with the
     * rule-item type name and a readable book source string.
     *
     * @param originalConceptId the group's original concept id
     * @return the preferred term carrying its synonyms in {@code termList}
     * @throws IndexOutOfBoundsException if no type-0 term exists for the id
     */
    public Term detailTermById(String originalConceptId) {
        Term term = null;
        // escape embedded quotes: the id is interpolated into raw SQL and may
        // originate from a caller-supplied request parameter
        String safeId = originalConceptId == null ? "" : originalConceptId.replace("'", "''");
        String sql = "select term.*, rule_item.name as `typeName`," +
                " concat_ws(';', concat('《', term_book_metadata.name, '》'), " +
                " term_book_metadata.version, term_book_metadata.isbn) as nameSource from " +
                " term left join rule_item on term.type = rule_item.idx and rule_item.rule_id = '18'" +
                " left join term_book_metadata on term.term_book_metadata_id = term_book_metadata.id" +
                " where term.original_concept_id = '" + safeId + "'";
        List<Term> list = termMapper.executeMultiSelect(sql);
        list.forEach(t -> {
            // concat_ws joins with ';'; re-join the non-empty parts with '-' for display
            t.setNameSource(Stream.of(t.getNameSource().split(";")).filter(StringUtils::isNotEmpty)
                    .collect(Collectors.joining("-")));
        });
        term = list.stream().filter(t -> 0 == t.getType()).collect(Collectors.toList()).get(0);
        term.setTermList(list.stream().filter(t -> 0 != t.getType()).collect(Collectors.toList()));
        return term;
    }

    /**
     * Persists changes to a term book's metadata row.
     * Thin delegate to the mapper's saveBean.
     */
    public void updateMetadataById(TermBookMetadata termBookMetadata) {
        termBookMetadataMapper.saveBean(termBookMetadata);
    }


    /**
     * Self-check step: merges a book's imported terms, detects conflicts and
     * moves the conflict-free groups into the intermediate term table
     * (term_between).
     *
     * <p>Note: {@code termBookMetadata}'s status is mutated to
     * STATUS_ENTER_BETWEEN but not persisted here — presumably the caller
     * saves it afterwards; TODO confirm.
     */
    public void selfChecking(TermBookMetadata termBookMetadata) {
        // mark the book as entered into the intermediate term library
        termBookMetadata.setStatus(TermBookMetadata.STATUS_ENTER_BETWEEN);
        // load the book's terms to self-check, ordered by id
        List<Term> termList = lambdaQuery().eq(Term::getTermBookMetadataId, termBookMetadata.getId()).orderByAsc(Term::getId).list();
        // merge groups sharing the same preferred name, then resolve in-group conflicts
        termList = dealGroupConflict(termList);
        // detect conflicts against other term groups
        termList = judgeDiffGroup(termList, termBookMetadata);
        // move the remaining (conflict-free) terms into the intermediate table
        if (CollectionUtils.isNotEmpty(termList)) {
            // one term_between row per concept group
            Map<String, List<Term>> termMap = groupByConceptId(termList, 0);
            List<TermBetween> termBetweenList = Lists.newArrayList();
            for (Map.Entry<String, List<Term>> entry : termMap.entrySet()) {
                termBetweenList.add(saveTermBetween(entry.getValue(), termBookMetadata));
            }
            // persist the intermediate rows
            termBetweenList.forEach(termBetweenService.getBaseMapper()::saveBean);
            // mark the source terms (those without a conflict group, not yet final) as entered
            String sql = "update term set status = " + Term.STATUS_ENTER_BETWEEN + " where (conflict_concept_id is null or conflict_concept_id = '') and term_book_metadata_id = '" + termBookMetadata.getId() + "' and status != 4";
            termMapper.executeUpdate(sql);
        }
    }

    /**
     * Groups terms by one of their concept identifiers, preserving the order
     * in which keys are first encountered.
     *
     * @param termList terms to group
     * @param type     0 = group by concept id, 1 = by conflict concept id,
     *                 anything else (documented as 2) = by original concept id
     * @return insertion-ordered map of identifier -> terms sharing it
     */
    private Map<String, List<Term>> groupByConceptId(List<Term> termList, int type) {
        // choose the key extractor once instead of duplicating the loop per branch
        Function<Term, String> keyExtractor;
        if (0 == type) {
            keyExtractor = Term::getConceptId;
        } else if (1 == type) {
            keyExtractor = Term::getConflictConceptId;
        } else {
            keyExtractor = Term::getOriginalConceptId;
        }
        Map<String, List<Term>> termMap = Maps.newLinkedHashMap();
        for (Term term : termList) {
            // computeIfAbsent replaces the manual get / isEmpty / put dance
            termMap.computeIfAbsent(keyExtractor.apply(term), k -> Lists.newArrayList()).add(term);
        }
        return termMap;
    }


    /**
     * Replaces a whole concept group: deletes every row sharing the group's
     * original concept id, then re-inserts the provided terms.
     *
     * @param termList replacement terms; must be non-empty and share one
     *                 original concept id
     * @throws IllegalArgumentException if termList is null or empty (the
     *                                  original threw IndexOutOfBounds here)
     */
    public void termUpdateById(List<Term> termList) {
        if (CollectionUtils.isEmpty(termList)) {
            throw new IllegalArgumentException("termList不能为空");
        }
        // escape quotes: the id is spliced into a raw SQL where-part
        String originalConceptId = termList.get(0).getOriginalConceptId().replace("'", "''");
        // delete the old group first, then batch-insert the replacement
        termMapper.deleteBeansByWherePart(" original_concept_id = '" + originalConceptId + "'");
        saveBatch(termList, 100);
    }


    /**
     * Pages through a book's intermediate terms (term_between) and returns
     * them attached to the book metadata.
     *
     * <p>Every row gets a transient {@code nameSource} JSON array describing
     * the source book. For {@code queryType == "conflict"} the rows are
     * grouped by conflict concept id (insertion order preserved) and each row
     * receives its group size and a running group number before being
     * flattened back into a list.
     *
     * @param termBetween query object: book id, queryType, paging and search fields
     * @return the book metadata with {@code termBetweenList} and {@code totalNum} set
     */
    public TermBookMetadata queryTermBetweenList(TermBetween termBetween) {
        TermBookMetadata termBookMetadata = null;
        termBookMetadata = termBookMetadataMapper.selectById(termBetween.getTermBookMetadataId());
        // build the source-book descriptor shared by every returned row
        JSONObject bookPropertyObject = new JSONObject();
        bookPropertyObject.put("name", termBookMetadata.getName());
        bookPropertyObject.put("version", termBookMetadata.getVersion());
        bookPropertyObject.put("isbn", termBookMetadata.getIsbn());
        bookPropertyObject.put("category", termBookMetadata.getCategory());
        bookPropertyObject.put("type", Const.IMPORT);
        JSONArray jsonArray = new JSONArray();
        jsonArray.add(bookPropertyObject);
        List<TermBetween> list = getTermBetweenList(termBetween);
        if ("conflict".equals(termBetween.getQueryType())) {
            Map<String, List<TermBetween>> termBetweenMap = Maps.newLinkedHashMap();
            List<TermBetween> conceptList = null;
            for (TermBetween termBetween1 : list) {
                // transient field: where this row came from
                termBetween1.setNameSource(jsonArray);
                conceptList = termBetweenMap.get(termBetween1.getConflictConceptId());
                if (CollectionUtils.isEmpty(conceptList)) {
                    termBetweenMap.put(termBetween1.getConflictConceptId(), Lists.newArrayList(termBetween1));
                } else {
                    conceptList.add(termBetween1);
                }
            }
            // number the conflict groups and record each group's size on its rows
            List<TermBetween> betweenList = new ArrayList<>();
            int sortNo = 0;
            for (List<TermBetween> itemConceptList : termBetweenMap.values()) {
                sortNo++;
                for (TermBetween between : itemConceptList) {
                    between.setTermBetweenCount(itemConceptList.size());
                    between.setSortNo(sortNo);
                }
                betweenList.addAll(itemConceptList);
            }
            termBookMetadata.setTermBetweenList(betweenList);

        } else {
            // NOTE(review): non-conflict rows do not get nameSource set — confirm intended
            termBookMetadata.setTermBetweenList(list);
        }

        // total count was computed by getTermBetweenList as a side effect
        termBookMetadata.setTotalNum(termBetween.getTotalNum());
        return termBookMetadata;
    }

    /**
     * Loads one page of intermediate terms and records the total count into
     * {@code termBetween.totalNum} as a side effect.
     *
     * <p>"original" view: a plain LIMIT page over non-deleted rows of the
     * book. Conflict view: pages over distinct conflict group ids first, then
     * loads every row belonging to the paged groups.
     *
     * @param termBetween query object (book id, queryType, keyword, paging)
     * @return the rows of the requested page
     */
    private List<TermBetween> getTermBetweenList(TermBetween termBetween) {
        List<TermBetween> list = null;
        // nameSource is filled in later by the caller
        if ("original".equals(termBetween.getQueryType())) {
            String sql = "select * from term_between where term_book_metadata_id = '" + termBetween.getTermBookMetadataId() + "' and deleted = 0 ";
            sql = getSearchSql(termBetween, sql);
            // NOTE(review): "order by updated_time, id desc" sorts updated_time
            // ASCENDING; if newest-first was intended it should be
            // "updated_time desc, id desc" — confirm
            sql += " order by updated_time, id desc limit " + termBetween.getCurrentPage() * termBetween.getPageSize() + ", " + termBetween.getPageSize();
            list = termBetweenService.getBaseMapper().executeMultiSelect(sql);
            // total row count (separate unpaged query)
            String betweenSql = "select * from term_between where term_book_metadata_id ='" + termBetween.getTermBookMetadataId() + "' and deleted = 0";
            betweenSql = getSearchSql(termBetween, betweenSql);
            List<TermBetween> betweenList = termBetweenService.getBaseMapper().executeMultiSelect(betweenSql);
            termBetween.setTotalNum(betweenList.size());
        } else {
            // page over distinct conflict group ids
            String sql = "select conflict_concept_id from term_between where term_book_metadata_id = '" + termBetween.getTermBookMetadataId() + "' and status=" + TermBetween.STATUS_CONFLICT + " and deleted = 0 group by conflict_concept_id ";
            // apply the keyword filter on the group subquery
            sql = getConflictSql(termBetween, sql);
            sql += " limit " + termBetween.getCurrentPage() * termBetween.getPageSize() + ", " + termBetween.getPageSize();
            list = termBetweenService.getBaseMapper().executeMultiSelect(sql);

            List<String> conflictIds = list.stream().map(TermBetween::getConflictConceptId).collect(Collectors.toList());
            
            // total distinct group count (separate unpaged query)
            String conflictSql = "select * from term_between where term_book_metadata_id = '" + termBetween.getTermBookMetadataId() + "' and deleted = 0 ";
            conflictSql += " and status=" + TermBetween.STATUS_CONFLICT;
            // apply the same keyword filter
            conflictSql = getConflictSql(termBetween, conflictSql);
            List<TermBetween> conflictList = termBetweenService.getBaseMapper().executeMultiSelect(conflictSql);
            Set<String> set = Sets.newLinkedHashSet();
            for (TermBetween item : conflictList) {
                set.add(item.getConflictConceptId());
            }
            termBetween.setTotalNum(set.size());
            // keep only the full rows belonging to the paged group ids
            list = conflictList.stream().filter(t -> conflictIds.contains(t.getConflictConceptId())).collect(Collectors.toList());
        }

        return list;
    }

    /**
     * Appends a keyword predicate restricting results to conflict groups in
     * which any name/synonym column (any language) matches the keyword.
     *
     * @param termBetween carries the optional search keyword in {@code name}
     * @param sql         base query to extend
     * @return the extended SQL (unchanged when no keyword is given)
     */
    private String getConflictSql(TermBetween termBetween, String sql) {
        if (StringUtils.isNotBlank(termBetween.getName())) {
            // escape quotes: the keyword is user input interpolated into raw SQL
            // NOTE(review): LIKE wildcards (% and _) in the keyword are still
            // interpreted — confirm that is acceptable for search
            String like = "'%" + termBetween.getName().replace("'", "''") + "%'";
            sql += " and conflict_concept_id in (select conflict_concept_id from term_between where " +
                    " (name like " + like + " or synonym like " + like +
                    " or english_name like " + like + " or english_name_synonym like " + like +
                    " or english_abbr like " + like + " or english_abbr_synonym like " + like +
                    " or french_name like " + like + " or french_name_synonym like " + like +
                    " or russian_name like " + like + " or russian_name_synonym like " + like +
                    " or german_name like " + like + " or german_name_synonym like " + like +
                    " or japanese_name like " + like + " or japanese_name_synonym like " + like + ")" +
                    " and status= " + TermBetween.STATUS_CONFLICT + ") ";
        }
        return sql;
    }

    /**
     * Appends a keyword predicate matching the keyword against every
     * name/synonym column (any language) of term_between.
     *
     * @param termBetween carries the optional search keyword in {@code name}
     * @param sql         base query to extend
     * @return the extended SQL (unchanged when no keyword is given)
     */
    @NotNull
    private String getSearchSql(TermBetween termBetween, String sql) {
        // filter by keyword across all name columns
        if (StringUtils.isNotBlank(termBetween.getName())) {
            // escape quotes: the keyword is user input interpolated into raw SQL
            // NOTE(review): LIKE wildcards (% and _) in the keyword are still
            // interpreted — confirm that is acceptable for search
            String like = "'%" + termBetween.getName().replace("'", "''") + "%'";
            sql += " and (name like " + like + " or synonym like " + like +
                    " or english_name like " + like + " or english_name_synonym like " + like +
                    " or english_abbr like " + like + " or english_abbr_synonym like " + like +
                    " or french_name like " + like + " or french_name_synonym like " + like +
                    " or russian_name like " + like + " or russian_name_synonym like " + like +
                    " or german_name like " + like + " or german_name_synonym like " + like +
                    " or japanese_name like " + like + " or japanese_name_synonym like " + like + ")";
        }
        return sql;
    }

    /**
     * Merges terms whose official names (type == 0) collide across different concept groups.
     * Colliding groups are re-keyed under one concept id, multi-valued fields are merged per
     * distinct name, and the rows made redundant by the merge are soft-deleted with the
     * group-conflict status.
     *
     * @param termList all terms under consideration (official names and their synonyms)
     * @return the merged terms that survive the collision resolution
     */
    private List<Term> dealGroupConflict(List<Term> termList) {
        // Official names (type == 0).
        List<Term> nameList = termList.stream().filter(t -> t.getType() == 0).collect(Collectors.toList());
        // Bucket keyed by official name; each value accumulates the whole concept group(s).
        Map<String, List<Term>> map = Maps.newLinkedHashMap();
        for (Term term : nameList) {
            // Every term belonging to this official name's concept group.
            List<Term> crtConceptTermList = termList.stream().filter(t -> StringUtils.equals(t.getConceptId(), term.getConceptId())).collect(Collectors.toList());
            List<Term> crtList = map.getOrDefault(term.getName(), Lists.newArrayList());
            crtList.addAll(crtConceptTermList);
            map.put(term.getName(), crtList);
        }
        List<Term> afterTheMergerList = Lists.newArrayList();
        for (List<Term> itemList : map.values()) {
            // Representative official name of this bucket.
            Term term = itemList.stream().filter(t -> 0 == t.getType()).findFirst().get();
            // More than one official name means at least two groups share the same name:
            // move every involved term under this group's concept id.
            if (itemList.stream().filter(t -> 0 == t.getType()).count() > 1) {
                lambdaUpdate().set(Term::getConceptId, term.getConceptId())
                        .in(Term::getId, itemList.stream().map(Term::getId).collect(Collectors.toList())).update();
            }
            // Group by name; the type sort keeps the lowest-type row first in each bucket.
            Map<String, List<Term>> nameMap = itemList.stream()
                    .sorted(Comparator.comparingInt(Term::getType))
                    .collect(Collectors.groupingBy(Term::getName));
            for (String key : nameMap.keySet()) {
                List<Term> valueList = nameMap.get(key);
                Term afterTheMerger = valueList.get(0);
                afterTheMerger.setConceptId(term.getConceptId());
                mergeField(afterTheMerger, valueList);
                // For the official name, also merge every explanation into one JSON array.
                if (StringUtils.equals(key, term.getName())) {
                    afterTheMerger.setType(0);
                    List<String> explanationList = valueList.stream().map(Term::getExplanation)
                            .filter(CollectionUtils::isNotEmpty)
                            .flatMap(item -> JSONArray.parseArray(JSON.toJSONString(item), String.class).stream())
                            .filter(StringUtils::isNotEmpty)
                            .distinct()
                            .collect(Collectors.toList());
                    afterTheMerger.setExplanation(JSONArray.parseArray(JSON.toJSONString(explanationList)));
                }
                afterTheMergerList.add(afterTheMerger);
            }
        }
        // Anything not kept after the merge is soft-deleted as a group conflict.
        termList.removeAll(afterTheMergerList);
        if (CollectionUtils.isNotEmpty(termList)) {
            lambdaUpdate().set(Term::getDeleted, Term.DELETED_YES)
                    .set(Term::getStatus, Term.STATUS_GROUP_CONFLICT)
                    .in(Term::getId, termList.stream().map(Term::getId).collect(Collectors.toList()))
                    .update();
        }
        return afterTheMergerList;
    }

    /**
     * Merges the semicolon-separated multi-value fields of every term in {@code valueList}
     * into {@code afterTheMerger}: values from all terms are concatenated, split on ';',
     * de-duplicated, and re-joined with ';'.
     *
     * @param afterTheMerger the surviving term that receives the merged values
     * @param valueList      all terms contributing values (includes afterTheMerger itself)
     */
    private void mergeField(Term afterTheMerger, List<Term> valueList) {
        afterTheMerger.setXmlClassify(mergeSemicolonValues(valueList, Term::getXmlClassify));
        afterTheMerger.setCode(mergeSemicolonValues(valueList, Term::getCode));
        afterTheMerger.setPinyin(mergeSemicolonValues(valueList, Term::getPinyin));
        afterTheMerger.setEnglishName(mergeSemicolonValues(valueList, Term::getEnglishName));
        afterTheMerger.setEnglishAbbr(mergeSemicolonValues(valueList, Term::getEnglishAbbr));
        afterTheMerger.setFrenchName(mergeSemicolonValues(valueList, Term::getFrenchName));
        afterTheMerger.setRussianName(mergeSemicolonValues(valueList, Term::getRussianName));
        afterTheMerger.setGermanName(mergeSemicolonValues(valueList, Term::getGermanName));
        afterTheMerger.setJapaneseName(mergeSemicolonValues(valueList, Term::getJapaneseName));
    }

    /**
     * Extracts one semicolon-separated field from every term, splits it into individual
     * values, drops empty/duplicate values, and re-joins the distinct values with ';'.
     * Equivalent to the former join-then-split-then-distinct pipeline, written once.
     */
    private String mergeSemicolonValues(List<Term> terms, Function<Term, String> getter) {
        return terms.stream()
                .map(getter)
                .filter(StringUtils::isNotEmpty)
                .flatMap(value -> Arrays.stream(value.split(";")))
                .distinct()
                .collect(Collectors.joining(";"));
    }

    /**
     * Detects official-name conflicts BETWEEN different concept groups of one book. Terms of
     * conflicting groups receive a shared conflict-concept id and the conflict status;
     * conflict chains are merged transitively (a-b conflict plus b-c conflict end up under
     * one id).
     *
     * @param termList         the book's terms, already free of intra-group conflicts
     * @param termBookMetadata the book; marked STATUS_CONFLICT when conflicts are found
     * @return the terms not involved in any conflict
     */
    private List<Term> judgeDiffGroup(List<Term> termList, TermBookMetadata termBookMetadata) {
        // All names distinct -> no conflicts anywhere in this book.
        if (termList.stream().map(Term::getName).distinct().count() == termList.size()) {
            return termList;
        }
        // Terms grouped by concept id.
        Map<String, List<Term>> sameGroupTerm = termList.stream().collect(Collectors.groupingBy(Term::getConceptId));
        Map<String, List<Term>> conflictMap = Maps.newHashMap();
        Term outerTerm = null;
        Term insideTerm = null;
        Map<String, Integer> countMap = Maps.newHashMap();
        int j = 0;
        // The previous step rules out conflicts inside a single group, so the inner loop
        // starts after all groups seen so far (index j). NOTE(review): this assumes termList
        // is ordered group by group — confirm with the caller.
        for (int i = 0; i < termList.size(); i++) {
            outerTerm = termList.get(i);
            if (Objects.isNull(countMap.get(outerTerm.getConceptId()))) {
                countMap.put(outerTerm.getConceptId(), sameGroupTerm.get(outerTerm.getConceptId()).size());
                j = countMap.values().stream().mapToInt(Integer::intValue).sum();
            }
            for (int k = j; k < termList.size(); k++) {
                insideTerm = termList.get(k);
                // Both sides already tagged with the same conflict id -> nothing to do.
                if (StringUtils.isNotEmpty(outerTerm.getConflictConceptId())
                        && StringUtils.equals(outerTerm.getConflictConceptId(),
                        insideTerm.getConflictConceptId())) {
                    continue;
                }
                // Same name in two different groups -> the terms conflict.
                if (StringUtils.equals(outerTerm.getName(), insideTerm.getName())) {
                    // Example: a-b-f conflict under id 1, d-e conflict under id 2; when e-f
                    // also conflict, all of them are re-tagged under one fresh id.
                    String outerConflictId = outerTerm.getConflictConceptId();
                    String insideConflictId = insideTerm.getConflictConceptId();
                    // Fresh conflict id shared by both groups and their previous chains.
                    String conflictConceptId = IdWorker.getIdStr();
                    List<Term> crtConflictList = Stream.of(sameGroupTerm.get(outerTerm.getConceptId()).stream(),
                            sameGroupTerm.get(insideTerm.getConceptId()).stream(),
                            conflictMap.getOrDefault(outerConflictId, Lists.newArrayList()).stream(),
                            conflictMap.getOrDefault(insideConflictId, Lists.newArrayList()).stream())
                            .flatMap(Function.identity())
                            .distinct()
                            .peek(item -> item.setConflictConceptId(conflictConceptId))
                            .peek(item -> item.setStatus(Term.STATUS_CONFLICT))
                            .collect(Collectors.toList());
                    conflictMap.remove(outerConflictId);
                    conflictMap.remove(insideConflictId);
                    conflictMap.put(conflictConceptId, crtConflictList);
                }
            }
        }
        // Persist the conflicting terms and exclude them from the returned list.
        List<Term> conflictList = conflictMap.values().stream().flatMap(List::stream).collect(Collectors.toList());
        if (CollectionUtils.isNotEmpty(conflictList)) {
            termBookMetadata.setStatus(TermBookMetadata.STATUS_CONFLICT);
            // Write back the conflict ids and statuses.
            updateBatchById(conflictList);
            termList.removeAll(conflictList);
        }
        return termList;
    }


    /**
     * Creates or updates a standard term after checking that its name and synonyms do not
     * collide with any other live standard term, then records a new history version.
     *
     * @param termStandard the term to persist; a non-blank id means update
     * @return the persisted term
     * @throws RuntimeException when the name or one of the synonyms already exists
     */
    public TermStandard save(TermStandard termStandard) {

        String id = termStandard.getId();

        List<TermStandard> termStandards;
        int type = Const.MANUAL_CREATE;
        if (StringUtils.isNotBlank(id)) {
            type = Const.MANUAL_UPDATE;
            // Escape the id to keep the quoted literal intact (SQL injection hardening);
            // a parameterized query would be the proper fix.
            termStandards = termStandardMapper.selectBeansByWherePart(
                    " id != '" + StringEscapeUtils.escapeSql(id) + "' and deleted = " + TermStandard.DELETED_NO);
            termStandard.setVersion(termStandard.getVersion() + 1);
        } else {
            termStandards = termStandardMapper.selectBeansByWherePart(" deleted = " + TermStandard.DELETED_NO);
            termStandard.setVersion(1);
        }
        if (!termStandards.isEmpty()) {
            // Collect every existing name and synonym value for the duplicate check.
            List<String> nameAndSynonymList = Lists.newArrayList();
            nameAndSynonymList.addAll(Lists.transform(termStandards, TermStandard::getName));
            // Synonym elements may be fastjson JSONObjects or plain HashMaps depending on
            // where the list came from; the Map interface covers both (the previous mix of
            // (JSONObject) and (HashMap) casts risked ClassCastException).
            // NOTE(review): assumes each synonym entry's "value" is a String — confirm.
            termStandards.forEach(t -> nameAndSynonymList.addAll(t.getSynonym().stream()
                    .map(obj -> (String) ((Map<?, ?>) obj).get("value"))
                    .collect(Collectors.toList())));
            if (nameAndSynonymList.contains(termStandard.getName())) {
                throw new RuntimeException("正名:" + termStandard.getName() + " 已存在，请重新输入");
            }
            termStandard.getSynonym().stream()
                    .map(obj -> ((Map<?, ?>) obj).get("value"))
                    .forEach(s -> {
                        if (nameAndSynonymList.contains(s)) {
                            throw new RuntimeException("同义词:" + s + " 已存在，请重新输入");
                        }
                    });
        }
        termStandardMapper.saveBean(termStandard);
        saveNewVersion(termStandard.getId(), type);
        return termStandard;
    }


    /**
     * Loads a standard term by id and converts each semicolon-separated language field into
     * a JSON-array string (e.g. "a;b" -> ["a","b"]) for the edit form.
     *
     * @param id primary key of the standard term
     * @return field-name to display-value map for the edit page
     */
    public Map<String, Object> queryEditById(String id) {
        TermStandard termStandard = termStandardMapper.selectById(id);

        Map<String, Object> map = new HashMap<>();
        map.put("id", termStandard.getId());
        map.put("name", termStandard.getName());
        map.put("version", termStandard.getVersion());
        map.put("synonym", termStandard.getSynonym());
        map.put("englishNameSynonym", splitToJsonArray(termStandard.getEnglishNameSynonym()));
        map.put("englishName", splitToJsonArray(termStandard.getEnglishName()));
        map.put("englishAbbr", splitToJsonArray(termStandard.getEnglishAbbr()));
        // BUGFIX: this branch previously tested getEnglishAbbr() but split
        // getEnglishAbbrSynonym(), throwing an NPE whenever the abbreviation was set while
        // its synonym column was null. The helper now guards the value it actually splits.
        map.put("englishAbbrSynonym", splitToJsonArray(termStandard.getEnglishAbbrSynonym()));
        map.put("germanName", splitToJsonArray(termStandard.getGermanName()));
        map.put("germanNameSynonym", splitToJsonArray(termStandard.getGermanNameSynonym()));
        map.put("russianName", splitToJsonArray(termStandard.getRussianName()));
        map.put("russianNameSynonym", splitToJsonArray(termStandard.getRussianNameSynonym()));
        map.put("frenchName", splitToJsonArray(termStandard.getFrenchName()));
        map.put("frenchNameSynonym", splitToJsonArray(termStandard.getFrenchNameSynonym()));
        map.put("japaneseName", splitToJsonArray(termStandard.getJapaneseName()));
        map.put("japaneseNameSynonym", splitToJsonArray(termStandard.getJapaneseNameSynonym()));
        map.put("paraphrase", termStandard.getExplanation());
        map.put("nameSource", termStandard.getNameSource());
        return map;
    }

    /**
     * Splits a semicolon-separated value into a JSON-array string; blank input yields "[]".
     */
    private String splitToJsonArray(String value) {
        if (StringUtils.isBlank(value)) {
            return "[]";
        }
        return JSONArray.toJSONString(value.split(";"));
    }


    /**
     * Soft-deletes a standard term by flagging its deleted column.
     *
     * @param id primary key of the standard term
     * @return true when a row was updated
     */
    public boolean deleteById(String id) {
        // Escape the id so the quoted literal cannot be broken out of (SQL injection
        // hardening); a parameterized statement would be the proper fix.
        String sql = "update term_standard set deleted=" + TermStandard.DELETED_YES
                + " where id= '" + StringEscapeUtils.escapeSql(id) + "'";
        return termStandardMapper.executeUpdate(sql) > 0;
    }


    /**
     * Snapshots the current state of a standard term into the history table.
     *
     * @param id   id of the standard term to snapshot
     * @param type manual-create / manual-update marker stored on the history row
     */
    public void saveNewVersion(String id, int type) {
        TermStandard current = termStandardMapper.selectById(id);
        // Copy all matching fields via a JSON round-trip, then stamp the history metadata.
        TermStandardHistory snapshot =
                JSON.parseObject(JSON.toJSONString(current), TermStandardHistory.class);
        snapshot.setId(null);
        snapshot.setStandardId(id);
        snapshot.setType(type);
        snapshot.setTermType(Const.WITH_SAME_STANDARD_TERM);
        termStandardHistoryService.saveOrUpdate(snapshot);
    }


    /**
     * Loads a between-library term by id and converts each semicolon-separated language
     * field into a JSON-array string for the edit form; the name source is composed from
     * the owning book's title, optional version, and ISBN.
     *
     * @param id primary key of the between-library term
     * @return field-name to display-value map for the edit page
     */
    public Map<String, Object> queryTermBetweenEditById(String id) {
        TermBetween termBetween = termBetweenService.getBaseMapper().selectById(id);
        TermBookMetadata termBookMetadata = termBookMetadataMapper.selectById(termBetween.getTermBookMetadataId());
        String synonym = "";
        if (StringUtils.isNotBlank(termBetween.getSynonym())) {
            synonym = JSONArray.toJSONString(termBetween.getSynonym().split(";"));
        } else {
            synonym = "[]";
        }
        String englishNameSynonym = "";
        if (StringUtils.isNotBlank(termBetween.getEnglishNameSynonym())) {
            englishNameSynonym = JSONArray.toJSONString(termBetween.getEnglishNameSynonym().split(";"));
        } else {
            englishNameSynonym = "[]";
        }
        String englishName = "";
        if (StringUtils.isNotBlank(termBetween.getEnglishName())) {
            englishName = JSONArray.toJSONString(termBetween.getEnglishName().split(";"));
        } else {
            englishName = "[]";
        }
        String englishAbbr = "";
        if (StringUtils.isNotBlank(termBetween.getEnglishAbbr())) {
            englishAbbr = JSONArray.toJSONString(termBetween.getEnglishAbbr().split(";"));
        } else {
            englishAbbr = "[]";
        }
        String germanName = "";
        if (StringUtils.isNotBlank(termBetween.getGermanName())) {
            germanName = JSONArray.toJSONString(termBetween.getGermanName().split(";"));
        } else {
            germanName = "[]";
        }
        String germanNameSynonym = "";
        if (StringUtils.isNotBlank(termBetween.getGermanNameSynonym())) {
            germanNameSynonym = JSONArray.toJSONString(termBetween.getGermanNameSynonym().split(";"));
        } else {
            germanNameSynonym = "[]";
        }
        String englishAbbrSynonym = "";
        if (StringUtils.isNotBlank(termBetween.getEnglishAbbrSynonym())) {
            // BUGFIX: previously split getEnglishAbbr() here (copy-paste), so the synonym
            // column returned the abbreviation values instead of its own.
            englishAbbrSynonym = JSONArray.toJSONString(termBetween.getEnglishAbbrSynonym().split(";"));
        } else {
            englishAbbrSynonym = "[]";
        }
        String russianName = "";
        if (StringUtils.isNotBlank(termBetween.getRussianName())) {
            russianName = JSONArray.toJSONString(termBetween.getRussianName().split(";"));
        } else {
            russianName = "[]";
        }
        String russianNameSynonym = "";
        if (StringUtils.isNotBlank(termBetween.getRussianNameSynonym())) {
            russianNameSynonym = JSONArray.toJSONString(termBetween.getRussianNameSynonym().split(";"));
        } else {
            russianNameSynonym = "[]";
        }
        String frenchName = "";
        if (StringUtils.isNotBlank(termBetween.getFrenchName())) {
            frenchName = JSONArray.toJSONString(termBetween.getFrenchName().split(";"));
        } else {
            frenchName = "[]";
        }
        String frenchNameSynonym = "";
        if (StringUtils.isNotBlank(termBetween.getFrenchNameSynonym())) {
            frenchNameSynonym = JSONArray.toJSONString(termBetween.getFrenchNameSynonym().split(";"));
        } else {
            frenchNameSynonym = "[]";
        }
        String japaneseName = "";
        if (StringUtils.isNotBlank(termBetween.getJapaneseName())) {
            japaneseName = JSONArray.toJSONString(termBetween.getJapaneseName().split(";"));
        } else {
            japaneseName = "[]";
        }
        String japaneseNameSynonym = "";
        if (StringUtils.isNotBlank(termBetween.getJapaneseNameSynonym())) {
            japaneseNameSynonym = JSONArray.toJSONString(termBetween.getJapaneseNameSynonym().split(";"));
        } else {
            japaneseNameSynonym = "[]";
        }

        Map<String, Object> map = new HashMap<>();
        map.put("id", termBetween.getId());
        map.put("name", termBetween.getName());
        map.put("synonym", synonym);
        map.put("englishNameSynonym", englishNameSynonym);
        map.put("englishName", englishName);
        map.put("englishAbbr", englishAbbr);
        map.put("germanName", germanName);
        map.put("germanNameSynonym", germanNameSynonym);
        map.put("russianName", russianName);
        map.put("russianNameSynonym", russianNameSynonym);
        map.put("frenchName", frenchName);
        map.put("frenchNameSynonym", frenchNameSynonym);
        map.put("japaneseName", japaneseName);
        map.put("japaneseNameSynonym", japaneseNameSynonym);
        map.put("englishAbbrSynonym", englishAbbrSynonym);
        map.put("paraphrase", termBetween.getExplanation());
        // Name source: 《book title》-version-ISBN, version omitted when blank.
        map.put("nameSource", "《" + termBookMetadata.getName() + "》-" +
                (StringUtils.isBlank(termBookMetadata.getVersion()) ? "" : (termBookMetadata.getVersion() + "-"))
                + termBookMetadata.getIsbn());
        return map;
    }

    /**
     * Persists (insert or update) a between-library term via its mapper.
     *
     * @param termBetween the term to save
     */
    public void termBetweenSave(TermBetween termBetween) {
        termBetweenService.getBaseMapper().saveBean(termBetween);
    }

    /**
     * Soft-deletes a between-library term by flagging its deleted column.
     *
     * @param id primary key of the between-library term
     * @return true when a row was updated
     */
    public boolean deleteTermBetweenById(String id) {
        // Escape the id so the quoted literal cannot be broken out of (SQL injection
        // hardening); a parameterized statement would be the proper fix.
        String sql = "update term_between set deleted = " + TermBetween.DELETED_YES
                + " where id = '" + StringEscapeUtils.escapeSql(id) + "'";
        return termBetweenService.getBaseMapper().executeUpdate(sql) > 0;
    }


    /**
     * Resolves a book-level term conflict: saves the user-merged terms as not-yet-entered
     * rows, clears the conflict id from every term that carried it (moving them to the
     * entered-between-library status), and — when the book has no remaining conflicts —
     * advances the whole book to the entered status.
     *
     * @param mergedTermList     the merged replacement terms produced by the user
     * @param conflictConceptId  the conflict group being resolved
     * @param termBookMetadataId the book the conflict belongs to
     */
    public void solveConflictBookTerm(List<TermBetween> mergedTermList, String conflictConceptId, String termBookMetadataId) {
        for (TermBetween termBetween : mergedTermList) {
            termBetween.setStatus(TermBetween.STATUS_NO_ENTER);
            termBetween.setDeleted(TermBetween.DELETED_NO);
            termBetweenService.getBaseMapper().saveBean(termBetween);
        }
        // Detach the old conflict id and move the affected terms to the entered status.

        termMapper.executeUpdate(("update term set " +
                " status = " + Term.STATUS_ENTER_BETWEEN) + ", " +
                " updated_by = '" + PMPHAppUtil.getCurrentUserEnName() + "', " +
                " updated_time = '" + LocalDateTimeUtil.getNowAsString() + "'," +
                " conflict_concept_id = null " +
                " where conflict_concept_id = '" + conflictConceptId + "'");

        // If the book has no conflicting terms left, mark the whole book as entered.
        List<Term> conflictTerms = termMapper.selectBeansByWherePart("term_book_metadata_id = '" + termBookMetadataId + "' " +
                " and status = " + TermBetween.STATUS_CONFLICT);
        if (conflictTerms.size() == 0) {
            termBookMetadataMapper.executeUpdate("update term_book_metadata " +
                    " set status = " + TermBookMetadata.STATUS_ENTER_BETWEEN + ", updated_by = '" + PMPHAppUtil.getCurrentUserEnName() + "', updated_time = '" + LocalDateTimeUtil.getNowAsString() + "'" +
                    " where id = '" + termBookMetadataId + "'");
        }
    }


    /**
     * Resolves a standard-library term conflict: replaces the conflicting standard terms
     * with the user-approved ones (each with a version bump and a history record), clears
     * the conflict id from the between-library and standard tables, and — when the book has
     * no remaining conflicts — advances the book to the standardized status.
     *
     * @param termStandardVo carries the conflict id, the approved terms and the book id
     */
    public void solveConflictStandardTerm(TermStandardVo termStandardVo) {
        // Remove the old conflicting standard terms first.
        termStandardMapper.deleteBeansByWherePart("conflict_concept_id = '" + termStandardVo.getConflictConceptId() + "'");
        // Insert the approved terms into the standard library.
        for (TermStandard termStandard : termStandardVo.getStandardTermList()) {
            termStandard.setVersion(Objects.isNull(termStandard.getVersion()) ? 1 : termStandard.getVersion() + 1);
            termStandardMapper.saveOrUpdate(termStandard);
            // Record a history version for terms created/updated through conflict resolution.
            termStandardHistoryService.saveConflictVersion(termStandard, termStandardVo);
        }
        // Detach the old conflict id from the between-library terms.
        termBetweenService.getBaseMapper().executeUpdate("update term_between set " +
                " status = " + TermBetween.STATUS_ENTER_STANDARD + ", " +
                " updated_by = '" + PMPHAppUtil.getCurrentUserEnName() + "', " +
                " updated_time = '" + LocalDateTimeUtil.getNowAsString() + "', " +
                " conflict_concept_id = null " +
                " where conflict_concept_id = '" + termStandardVo.getConflictConceptId() + "'");

        // Detach the old conflict id from the standard terms.
        // NOTE(review): this goes through the term_between mapper while updating the
        // term_standard table — presumably executeUpdate is table-agnostic; confirm.
        termBetweenService.getBaseMapper().executeUpdate("update term_standard set " +
                " updated_by = '" + PMPHAppUtil.getCurrentUserEnName() + "', " +
                " updated_time = '" + LocalDateTimeUtil.getNowAsString() + "', " +
                " conflict_concept_id = null " +
                " where conflict_concept_id = '" + termStandardVo.getConflictConceptId() + "'");

        // If the book has no conflicting terms left, mark the whole book as standardized.
        List<TermBetween> conflictTerms = termBetweenService.getBaseMapper().selectBeansByWherePart("term_book_metadata_id = '" + termStandardVo.getTermBookMetadataId() + "' " +
                " and status = " + TermBetween.STATUS_CONFLICT);
        if (conflictTerms.size() == 0) {
            termBookMetadataMapper.executeUpdate("update term_book_metadata " +
                    " set status = " + TermBookMetadata.STATUS_STANDARD + "," +
                    " updated_by = '" + PMPHAppUtil.getCurrentUserEnName() + "', updated_time = '" + LocalDateTimeUtil.getNowAsString() + "' where id = '" + termStandardVo.getTermBookMetadataId() + "'");
        }
    }

    /**
     * Builds the sync-list view for a term book: terms are grouped by concept, each group is
     * represented by its official name (type == 0) carrying the group's synonyms and the
     * concept's platform sync status.
     *
     * @param term paging/filter carrier; its total count is copied onto the result
     * @return the book metadata with the grouped term list attached
     */
    public TermBookMetadata querySyncList(Term term) {
        TermBookMetadata termBookMetadata = termBookMetadataMapper.selectById(term.getTermBookMetadataId());
        List<Term> termList = queryTermListByBookIdAndPage(term);
        // Group the page's terms by concept id.
        Map<String, List<Term>> map = groupByConceptId(termList, 2);

        Map<String, KnowledgeSyncStatus> syncStatusMap = Maps.newHashMap();

        if (!map.keySet().isEmpty()) {
            List<KnowledgeSyncStatus> knowledgeSyncStatusList = knowledgeSyncStatusService.lambdaQuery()
                    .in(KnowledgeSyncStatus::getKnowledgeId, map.keySet()).list();
            if (CollectionUtils.isNotEmpty(knowledgeSyncStatusList)) {
                syncStatusMap = knowledgeSyncStatusList.stream()
                        .collect(Collectors.toMap(KnowledgeSyncStatus::getKnowledgeId, Function.identity()));
            }
        }

        // One entry per concept: the official name (type == 0) carries the whole group.
        List<Term> nameList = Lists.newArrayList();
        for (List<Term> itemList : map.values()) {
            Term nameTerm = itemList.stream().filter(t -> (0 == t.getType())).collect(Collectors.toList()).get(0);
            // Look the status up once instead of twice; an empty status object stands in for
            // concepts that were never synced (kind and operatedTime stay null).
            KnowledgeSyncStatus status =
                    syncStatusMap.getOrDefault(nameTerm.getOriginalConceptId(), new KnowledgeSyncStatus());
            nameTerm.setSyncStatus(status.getKind());
            nameTerm.setOperatedTime(status.getOperatedTime());
            nameTerm.setTermList(itemList.stream().filter(t -> (0 != t.getType())).collect(Collectors.toList()));
            nameList.add(nameTerm);
        }
        termBookMetadata.setTermList(nameList);
        termBookMetadata.setTotalNum(term.getTotalNum());
        return termBookMetadata;
    }

    /**
     * Pushes a book's terms to the selected platform: either everything matching the current
     * list filters (no concept ids given) or only the concepts selected on the page.
     *
     * @param term      carries the book id, filters, selected platform and optional concept ids
     * @param userName  operator recorded on the sync-status rows
     * @param operation push operation passed through to the message payload
     */
    @Transactional(propagation = Propagation.NOT_SUPPORTED)
    public void syncToPlatform(Term term, String userName, String operation) {
        List<Term> list;
        if (StringUtils.isBlank(term.getSyncConceptIds())) {
            // No explicit selection: push every non-deleted term of the book matching the
            // optional name / xml-classify filters.
            list = lambdaQuery().eq(Term::getTermBookMetadataId, term.getTermBookMetadataId())
                    .eq(Term::getDeleted, Const.DELETED_NO)
                    .like(StringUtils.isNotBlank(term.getName()), Term::getName, term.getName())
                    .like(StringUtils.isNotBlank(term.getXmlClassify()), Term::getXmlClassify, term.getXmlClassify())
                    .list();
        } else {
            // Push only the concepts ticked on the page.
            list = lambdaQuery().in(Term::getOriginalConceptId, term.getSyncConceptIds().split(",")).list();
        }
        syncData(list, term, userName, operation);
    }

    /**
     * Pushes whole books to the selected platform, one book at a time: either every
     * categorized book matching the list filters (no ids given) or only the books selected
     * on the page.
     *
     * @param termBookMetadata carries the filters, selected platform and optional book ids
     * @param userName         operator recorded on the sync-status rows
     * @param operation        push operation passed through to the message payload
     */
    @Transactional(propagation = Propagation.NOT_SUPPORTED)
    public void syncBookToPlatform(TermBookMetadata termBookMetadata, String userName, String operation) {
        List<TermBookMetadata> books;
        if (StringUtils.isBlank(termBookMetadata.getSyncIds())) {
            // No explicit selection: push every categorized book matching the filters,
            // excluding statuses 0 and 4.
            books = termBookMetadataMapper.lambdaQuery()
                    .like(StringUtils.isNotBlank(termBookMetadata.getName()), TermBookMetadata::getName, termBookMetadata.getName())
                    .eq(Objects.nonNull(termBookMetadata.getCategory()), TermBookMetadata::getCategory, termBookMetadata.getCategory())
                    .isNotNull(TermBookMetadata::getCategory)
                    .like(StringUtils.isNotBlank(termBookMetadata.getVersion()), TermBookMetadata::getVersion, termBookMetadata.getVersion())
                    .like(StringUtils.isNotBlank(termBookMetadata.getIsbn()), TermBookMetadata::getIsbn, termBookMetadata.getIsbn())
                    .notIn(TermBookMetadata::getStatus, 0, 4)
                    .list();
        } else {
            // Push only the books ticked on the page.
            books = termBookMetadataMapper.lambdaQuery()
                    .in(TermBookMetadata::getId, termBookMetadata.getSyncIds().split(",")).list();
        }
        List<String> bookIds = books.stream().map(TermBookMetadata::getId).collect(Collectors.toList());
        // Load every term of the selected books up front (1000+ books not considered yet)
        // and bucket them by book, then push book by book.
        Map<String, List<Term>> termsByBook = lambdaQuery().in(Term::getTermBookMetadataId, bookIds).list()
                .stream().collect(Collectors.groupingBy(Term::getTermBookMetadataId));
        for (String bookId : bookIds) {
            Term carrier = new Term();
            carrier.setTermBookMetadataId(bookId);
            carrier.setSelectedPlatform(termBookMetadata.getSelectedPlatform());
            syncData(termsByBook.getOrDefault(bookId, Lists.newArrayList()), carrier, userName, operation);
        }
    }

    /**
     * Re-pushes terms whose previous sync did not succeed: drops every non-success status
     * row for the platform, then pushes all terms without a success record, book by book.
     *
     * @param platformId target platform
     * @param userName   operator recorded on the new sync-status rows
     * @param operation  push operation passed through to the message payload
     */
    @Transactional(propagation = Propagation.NOT_SUPPORTED)
    public void syncWaitAndFailedToPlatform(String platformId, String userName, String operation) {
        // Drop every status row except the successful ones so they can be rewritten.
        knowledgeSyncStatusService.remove(Wrappers.lambdaQuery(KnowledgeSyncStatus.class)
                .eq(KnowledgeSyncStatus::getKnowledgeLibId, KnowledgeLib.TERM_LIB_ID)
                .eq(KnowledgeSyncStatus::getPlatformId, platformId)
                .ne(KnowledgeSyncStatus::getKind, KnowledgeSyncStatus.KIND_SUCCESS));
        // Remaining rows are the successful pushes; those terms are excluded from the retry.
        List<KnowledgeSyncStatus> successList = knowledgeSyncStatusService.lambdaQuery()
                .eq(KnowledgeSyncStatus::getKnowledgeLibId, KnowledgeLib.TERM_LIB_ID)
                .eq(KnowledgeSyncStatus::getPlatformId, platformId)
                .list();
        List<Term> list = lambdaQuery().list();
        if (CollectionUtils.isNotEmpty(successList)) {
            // A Set makes the exclusion check O(1) per term instead of O(successes).
            Set<String> successIdSet = successList.stream()
                    .map(KnowledgeSyncStatus::getKnowledgeId).collect(Collectors.toSet());
            // Keep only never-sent / failed-to-send / failed-to-consume terms.
            list = list.stream().filter(t -> !successIdSet.contains(t.getId())).collect(Collectors.toList());
        }
        // BUGFIX: de-duplicate the book ids — previously every term contributed its book id,
        // so each book was pushed once per remaining term instead of once.
        List<String> bookIdList = list.stream().map(Term::getTermBookMetadataId)
                .distinct().collect(Collectors.toList());
        for (String bookId : bookIdList) {
            Term term = new Term();
            term.setTermBookMetadataId(bookId);
            term.setSelectedPlatform(platformId);
            syncData(list.stream().filter(t -> t.getTermBookMetadataId().equals(bookId)).collect(Collectors.toList()),
                    term, userName, operation);
        }
    }

    /**
     * Pushes one book's terms — and, when still unsynced, the book's metadata — to the
     * selected platform over RocketMQ, writing a sync-status record for every message sent.
     *
     * <p>Flow:
     * <ol>
     *   <li>Translate each term's numeric type code into its display name using the
     *       items of rule "18".</li>
     *   <li>Group the terms by original concept id (input order preserved); in each group
     *       the "正名" (preferred-name) entry becomes the payload and the remaining
     *       entries become its synonyms.</li>
     *   <li>Push the book metadata first when its sync status is still {@code SYNC_NO};
     *       if that message fails, the book's terms are not pushed.</li>
     *   <li>Push every assembled term, logging one sync record per message.</li>
     * </ol>
     *
     * @param list      terms of the single book identified by {@code term.getTermBookMetadataId()}
     * @param term      carrier of the book id and the target platform
     * @param userName  operator recorded in the sync log
     * @param operation message option, e.g. "add" or "update"
     * @throws IllegalStateException if a term carries a type code not defined by rule 18
     */
    private void syncData(List<Term> list, Term term, String userName, String operation) {
        String bookId = term.getTermBookMetadataId();
        // Parameterized SLF4J logging instead of string concatenation.
        log.info("图书ID:【{}】推送术语开始", bookId);
        if (CollectionUtils.isEmpty(list)) {
            log.info("图书ID:【{}】待推送术语为空", bookId);
            return;
        }
        // Rule 18's items map the numeric term-type index to its display name (e.g. "正名").
        Map<Integer, RuleItem> ruleItemMap = ruleItemService.lambdaQuery().eq(RuleItem::getRuleId, "18").list()
                .stream().collect(Collectors.toMap(RuleItem::getIdx, Function.identity()));
        // JSON round-trip copies Term -> TermSync; then replace the numeric type code with
        // its rule-item name. Fail fast with a descriptive message on an unknown code
        // (the previous bare map.get(...).getName() threw an uninformative NPE here).
        List<TermSync> tmpSyncTermList = JSON.parseArray(JSON.toJSONString(list), TermSync.class);
        tmpSyncTermList.forEach(t -> {
            RuleItem item = ruleItemMap.get(Integer.valueOf(t.getType()));
            if (item == null) {
                throw new IllegalStateException(
                        "图书ID:【" + bookId + "】术语类型码【" + t.getType() + "】未在规则18中定义");
            }
            t.setType(item.getName());
        });
        // Group by concept id while keeping the original list order.
        Map<String, List<TermSync>> termSyncMap = tmpSyncTermList.stream().collect(Collectors
                .groupingBy(TermSync::getOriginalConceptId, LinkedHashMap::new, Collectors.toList()));
        // Assemble one payload per concept: the "正名" entry carries the rest as synonyms.
        List<TermSync> syncTermList = Lists.newArrayList();
        termSyncMap.forEach((conceptId, group) -> {
            Optional<TermSync> preferred = group.stream()
                    .filter(t -> "正名".equals(t.getType())).findFirst();
            if (!preferred.isPresent()) {
                // Skip a malformed group instead of aborting the whole push with the
                // NoSuchElementException the former unchecked Optional.get() produced.
                log.error("图书ID:【{}】概念【{}】缺少正名,跳过该概念的推送", bookId, conceptId);
                return;
            }
            TermSync syncTerm = preferred.get();
            syncTerm.setSynonym(group.stream()
                    .filter(t -> !"正名".equals(t.getType())).collect(Collectors.toList()));
            syncTermList.add(syncTerm);
        });
        // Push the metadata first, one book at a time; only metadata not yet synced
        // (SYNC_NO) is pushed. If the metadata message fails, the terms are not pushed.
        List<TermBookMetadata> termBookMetadataList = termBookMetadataMapper.lambdaQuery()
                .eq(TermBookMetadata::getId, bookId)
                .eq(TermBookMetadata::getSyncStatus, Const.SYNC_NO).list();
        // Common fields of every sync-log row; "knowledgeId" is overwritten per term below.
        Map<String, String> syncDataMap = Maps.newHashMap();
        syncDataMap.put("knowledgeId", bookId);
        syncDataMap.put("syncPlatform", term.getSelectedPlatform());
        syncDataMap.put("userName", userName);
        syncDataMap.put("knowledgeLibId", KnowledgeLib.TERM_LIB_ID);
        if (CollectionUtils.isNotEmpty(termBookMetadataList)) {
            // Single local instead of five repeated get(0) calls.
            TermBookMetadata bookMeta = termBookMetadataList.get(0);
            JSONObject metadata = JSON.parseObject(bookMeta.getMetadata());
            metadata.put("Category", Const.TERM_BOOK_CATEGORY.get(bookMeta.getCategory()));
            metadata.put("id", bookMeta.getId());
            metadata.put("version", bookMeta.getVersion());
            JSONObject bookMetadata = new JSONObject();
            bookMetadata.put("option", "add");
            bookMetadata.put("type", Const.TERM_METADATA);
            bookMetadata.put("data", metadata);
            boolean sendFlag = RocketMQProducerUtil.sendMessage(term.getSelectedPlatform(),
                    Const.TERM_METADATA, bookMeta.getName(), JSON.toJSONString(bookMetadata));
            knowledgeSyncStatusService.insertSync(syncDataMap, sendFlag);
            if (!sendFlag) {
                log.error("图书ID:【{}】推送元数据失败", bookId);
                return;
            }
            // Flip only the sync status; leave every other column untouched.
            termBookMetadataMapper.update(null, new LambdaUpdateWrapper<TermBookMetadata>()
                    .set(TermBookMetadata::getSyncStatus, Const.SYNC_YES)
                    .eq(TermBookMetadata::getId, bookId));
        }
        // Push each assembled term and record the outcome per message.
        syncTermList.forEach(syncTerm -> {
            JSONObject termObject = new JSONObject();
            termObject.put("type", KnowledgeSystem.TERM.value);
            termObject.put("option", operation);
            termObject.put("data", syncTerm);
            boolean sendFlag = RocketMQProducerUtil.sendMessage(term.getSelectedPlatform(),
                    KnowledgeSystem.TERM.value, syncTerm.getName(),
                    JSON.toJSONString(termObject, SerializerFeature.WriteNullListAsEmpty,
                            SerializerFeature.WriteNullStringAsEmpty));
            syncDataMap.put("knowledgeId", syncTerm.getOriginalConceptId());
            knowledgeSyncStatusService.insertSync(syncDataMap, sendFlag);
        });
        log.info("图书ID:【{}】推送术语结束", bookId);
    }
}
