package com.yuntsg.ruiijn.paperana.service;

import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.collection.ListUtil;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.io.FileUtil;
import cn.hutool.http.HttpUtil;
import co.elastic.clients.elasticsearch.ElasticsearchClient;
import co.elastic.clients.elasticsearch.core.bulk.BulkOperation;
import co.elastic.clients.elasticsearch.indices.CreateIndexResponse;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.aspose.words.ControlChar;
import com.aspose.words.Document;
import com.aspose.words.SaveFormat;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.baomidou.mybatisplus.extension.conditions.query.LambdaQueryChainWrapper;
import com.yuntsg.nnsfcp.front.entity.BidDocumentRecordArticle;
import com.yuntsg.nnsfcp.front.entity.Employee;
import com.yuntsg.nnsfcp.front.mapper.BidDocumentRecordArticleMapper;
import com.yuntsg.nnsfcp.service.front.impl.UpLoadServiceImpl;
import com.yuntsg.ruiijn.database.entity.PaperAna;
import com.yuntsg.ruiijn.database.entity.PaperAnaTodo;
import com.yuntsg.ruiijn.database.entity.TextSelfDb;
import com.yuntsg.ruiijn.database.entity.WosBasic;
import com.yuntsg.ruiijn.database.mapper.PaperAnaMapper;
import com.yuntsg.ruiijn.database.mapper.PaperAnaTodoMapper;
import com.yuntsg.ruiijn.database.mapper.TextSelfDbMapper;
import com.yuntsg.ruiijn.database.mapper.WosBasicMapper;
import com.yuntsg.ruiijn.databse_search.entity.FileDetailsTxt;
import com.yuntsg.ruiijn.databse_search.entity.FileRecord;
import com.yuntsg.ruiijn.databse_search.service.FileDetailTxtService;
import com.yuntsg.ruiijn.databse_search.service.FileRecordsService;
import com.yuntsg.ruiijn.paperana.entity.*;
import com.yuntsg.ruiijn.paperana.utils.PublicUtils;
import com.yuntsg.ruiijn.paperana.utils.SmallTool;
import com.yuntsg.ruiijn.paperana.utils.StrUtils;
import com.yuntsg.ruiijn.paperana.utils.esutils.EsSearch;
import com.yuntsg.ruiijn.paperana.utils.esutils.EsServerEntity;
import com.yuntsg.ruiijn.paperana.utils.esutils.EsServerEntityTemp;
import com.yuntsg.ruiijn.paperana.utils.esutils.IndexConfig;
import common.util.*;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.impl.client.BasicCookieStore;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.util.*;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

import static com.yuntsg.ruiijn.paperana.service.ArticleRepeatSummary.comprehensive;
import static com.yuntsg.ruiijn.paperana.utils.DateUtil.StringToDate;
import static com.yuntsg.ruiijn.paperana.utils.StrUtils.getLemma;
import static common.util.ExtractUtil.isReferences;

/**
 * @Author: gcr
 * @Date: 2023/8/31 15:57
 */
@Service
@Slf4j


public class MatchService {
    // Primary Elasticsearch client bean.
    // NOTE(review): three ElasticsearchClient fields are injected (client, esClient,
    // txtEsClient) — confirm they actually resolve to different beans.
    @Autowired
    private ElasticsearchClient client;

    // Root directory for uploaded files and generated reports (application property).
    @Value("${upload.path}")
    public String uploadPath;

    // Base URL of the central ES/search API (application property).
    @Value("${central.api.url}")
    public String escenterUrl;

    // Looks up bid-document records so their review note can be printed in the report.
    @Resource
    BidDocumentRecordArticleMapper bidDocumentRecordArticleMapper;

    // Handles file upload/storage.
    @Resource
    UpLoadServiceImpl upLoadService;

    @Resource
    ElasticsearchClient esClient;
    // Mapper for the user-supplied comparison-text table.
    @Resource
    TextSelfDbMapper textSelfDbMapper;
    // PDF/Word text-extraction helper (also holds the Aspose license, see generateToWord2).
    @Resource
    ExtractUtil extractUtil;
    // Mapper for paper-analysis orders.
    @Resource
    PaperAnaMapper paperAnaMapper;
    // Uploaded-file bookkeeping services.
    @Resource
    FileRecordsService fileRecordsService;
    @Resource
    FileDetailTxtService fileDetailsTxtService;
    // Mapper for WOS article records driving the retry flow in manualData.
    @Resource
    WosBasicMapper wosBasicMapper;
    // Elasticsearch query helper.
    @Resource
    EsSearch esSearch;
    // Mapper for queued analysis tasks.
    @Resource
    PaperAnaTodoMapper paperAnaTodoMapper;
    // Client for the text-only ES cluster.
    @Resource
    private ElasticsearchClient txtEsClient;


//    public void addZFLComService(String artYear, String orgPdfpath, String str, Integer masterId, Integer pushFlag, String annex, String master, String author, String belongDate, String title, Integer wosId, Integer uid, String key, Integer type, String pmorut, Integer selfTxt) {
//        //采用循环查询方式进行 一瞬间不会得到结果
//        //type =1 是指的来自张福垒的数据过来的
//        System.out.println("123");
//        if (type != null && type == 1) {
//            System.out.println("参数文本长度二___" + str.length());
//            log.info("福垒一键分析过来的");
//            if (uid == null) {
//                uid = userUtil.getUid();
//            }
//            Employee employee = employeeMapper.selectById(uid);
//            CheckUtil.check(StringUtil.isNotNull(str), "上传文件内容为空");
//            long date = System.currentTimeMillis();
//            PaperAna paperAna = paperAnaMapper.selectOne(new QueryWrapper<PaperAna>().eq("rekey", key));
//            if (paperAna == null) {
//                PaperAna paperAnaNew = new PaperAna();
//                paperAnaNew.setUploadName(sysUser.getRealName());
//                paperAnaNew.setWosId(wosId);
//                paperAnaNew.setPushFlag(pushFlag);
//                paperAnaNew.setAnnex(annex);
//                paperAnaNew.setMaster(master);
//                paperAnaNew.setMasterId(masterId);
//                paperAnaNew.setAuthor(author);
//                paperAnaNew.setBelongDate(belongDate);
//                paperAnaNew.setUid(uid);
//                paperAnaNew.setTitle(title);
//                paperAnaNew.setRekey(key);
//                paperAnaNew.setContent(str);
//                paperAnaNew.setStatus(0);
//                paperAnaNew.setAdmCheck(0);
//                paperAnaNew.setUptime(System.currentTimeMillis());
//                paperAnaNew.setIsDel(0);
//                paperAnaNew.setSysCreateTime(LocalDateTime.now());
//                paperAnaNew.setSysUpdateTime(LocalDateTime.now());
//                paperAnaMapper.insert(paperAnaNew);
//            }
//            caculateStr(artYear, orgPdfpath, str, key, uid, pmorut, selfTxt);
//
//        } else {
//            log.info("gcr 文章分析过来的");
//            if (uid == null) {
//                uid = userUtil.getUid();
//            }
//            Employee employee = employeeMapper.selectById(uid);
//            CheckUtil.check(StringUtil.isNotNull(str), "上传文件内容为空");
//            long date = System.currentTimeMillis();
//            key = Code.digestData(date + uid + "paperana");
//            PaperAna paperAna = paperAnaMapper.selectOne(new QueryWrapper<PaperAna>().eq("uid", uid).eq("rekey", key));
//            if (paperAna == null) {
//                PaperAna paperAnaNew = new PaperAna();
//                paperAnaNew.setUploadName(sysUser.getRealName());
//                paperAnaNew.setWosId(wosId);
//                paperAnaNew.setPushFlag(pushFlag);
//                paperAnaNew.setAnnex(annex);
//                paperAnaNew.setMaster(master);
//                paperAnaNew.setMasterId(masterId);
//                paperAnaNew.setAuthor(author);
//                paperAnaNew.setBelongDate(belongDate);
//                paperAnaNew.setUid(uid);
//                paperAnaNew.setTitle(title);
//                paperAnaNew.setRekey(key);
//                paperAnaNew.setContent(str);
//                paperAnaNew.setStatus(0);
//                paperAnaNew.setAdmCheck(0);
//                paperAnaNew.setUptime(System.currentTimeMillis());
//                paperAnaNew.setIsDel(0);
//                paperAnaNew.setSysCreateTime(LocalDateTime.now());
//                paperAnaNew.setSysUpdateTime(LocalDateTime.now());
//                paperAnaMapper.insert(paperAnaNew);
//            }
//            caculateStr(artYear, orgPdfpath, str, key, uid, pmorut, selfTxt);
//
//        }
//
//
//    }

    /**
     * Maps a matched-keyword count onto a score in [0.5, 1.0].
     *
     * <p>Counts at or above {@code RAW_MAX} saturate to the maximum score; anything
     * below is rescaled linearly from the raw range [RAW_MIN, RAW_MAX] onto the
     * target range [TARGET_MIN, TARGET_MAX] by {@link PublicUtils#forceUnifyScoreReal}.
     *
     * @param keyStr number of matched keywords (must not be null)
     * @return score between 0.5 and 1.0
     */
    public static Double caPer(Integer keyStr) {
        final int rawMax = 16;        // raw-count ceiling
        final int rawMin = 4;         // raw-count floor
        final double targetMax = 1.0; // best possible score
        final double targetMin = 0.5; // worst possible score
        // Saturate at the ceiling instead of duplicating the literal 16 here.
        if (keyStr >= rawMax) {
            return targetMax;
        }
        return PublicUtils.forceUnifyScoreReal(rawMax, rawMin, targetMax, keyStr, targetMin);
    }

    /**
     * Computes the overall similarity percentage between an original article and a
     * candidate article, both supplied as sentence lists.
     *
     * <p>For each original sentence the best-matching candidate sentence is found
     * (a candidate only counts when more than half of the original sentence's
     * distinct tokens are shared); the matched-word counts are summed and divided
     * by the original article's total token count.
     *
     * @param org  sentences of the original article
     * @param from sentences of the candidate article
     * @return similarity as a percentage, rounded to two digits; 0.0 for empty input
     */
    public static double SameCaculate(List<String> org, List<String> from) {
        // Lemmatize each eligible candidate sentence ONCE up front — previously the
        // expensive getLemma() calls were repeated for every (original, candidate) pair.
        List<List<String>> fromWordLists = new ArrayList<>();
        for (String froms : from) {
            // Only candidate sentences with at least three raw tokens are considered.
            if (StringUtil.isNotNull(froms) && froms.split(" ").length >= 3) {
                List<String> words = new ArrayList<>();
                for (String token : froms.split(" ")) {
                    if (token.length() > 3) {
                        words.add(getLemma(token).toLowerCase());
                    }
                }
                fromWordLists.add(words);
            }
        }

        int allSameCount = 0; // sum over original sentences of the best per-sentence match
        int orgWordSize = 0;  // total raw token count of the original article
        for (String orgs : org) {
            String[] orgTokens = orgs.split(" ");
            orgWordSize += orgTokens.length;
            int max = 0;
            for (List<String> fromWords : fromWordLists) {
                // Pass fresh copies per call, matching the original code which rebuilt
                // both collections for every pair (in case the matcher mutates them).
                Set<String> orgsSet = new HashSet<>(Arrays.asList(orgTokens));
                Integer sameSize = StrUtils.sentenceParseWordNew(new ArrayList<>(fromWords), orgsSet);
                // Candidate counts only when >50% of the distinct original tokens match.
                if (sameSize.doubleValue() / orgsSet.size() > 0.5) {
                    max = Math.max(max, sameSize);
                }
            }
            allSameCount += max;
        }

        // Guard: an empty original article previously produced 0/0 == NaN.
        if (orgWordSize == 0) {
            return 0.0;
        }
        double ratio = PublicUtils.takeTwoDigits((double) allSameCount / orgWordSize);
        double res = 0.0;
        if (ratio >= 0.0) {
            res = PublicUtils.takeTwoDigits(ratio * 100);
        }
        return res;
    }

//    public void geneWord(Integer id) {
//        try {
//            PaperAna paperAna = paperAnaMapper.selectById(id);
//            CheckUtil.check(StringUtils.isNotEmpty(paperAna.getResult()), "结果未生成 请稍等");
//            OutEntity outEntity = JSONObject.parseObject(paperAna.getResult(), OutEntity.class);
//            generateToWord(uploadPath, outEntity, paperAna);
//        } catch (Exception e) {
//            CheckUtil.check(false, "生成word 异常");
//        }
//
//    }

    /**
     * Splits a whole paper, supplied as a list of text fragments, into per-sentence
     * word lists suitable for similarity matching.
     *
     * <p>The fragments are merged into one lowercase string, split on ". " into
     * sentences, reference-list sentences are dropped, and each remaining sentence
     * is lemmatized and reduced to its words longer than three characters. Only
     * sentences that keep more than three such words are returned.
     *
     * @param strList raw text fragments of the paper
     * @return one word list per retained sentence
     */
    public static List<List<String>> pdfToSentenceWords(List<String> strList) {
        // Merge everything, lowercase it and collapse runs of spaces before sentence-splitting.
        String merged = CollUtil.join(strList, " ").toLowerCase().replaceAll(" +", " ");
        List<List<String>> sentences = new ArrayList<>();
        for (String sentence : merged.split("\\. ")) {
            // Skip anything recognized as part of the reference list.
            if (isReferences(sentence)) {
                continue;
            }
            // Lemmatize, turn remaining punctuation into spaces, and tokenize.
            String normalized = getLemma(sentence)
                    .replace(",", " ")
                    .replace(";", " ")
                    .replace(":", " ")
                    .replaceAll(" +", " ");
            List<String> words = new ArrayList<>();
            for (String word : normalized.split(" ")) {
                if (word.length() > 3) {
                    words.add(word);
                }
            }
            // Keep only sentences that still carry more than three significant words.
            if (words.size() > 3) {
                sentences.add(words);
            }
        }
        return sentences;
    }

    /**
     * Returns, over all sentences of the comparison paper, the largest number of
     * words in a single sentence that also appear in the checked sentence's words.
     *
     * <p>Fix: the previous implementation joined the checked words into one string
     * and used substring {@code contains}, so a comparison word that merely occurred
     * inside a longer word (e.g. "the" inside "them") was wrongly counted. Matching
     * is now exact, via a hash set (also O(1) per lookup instead of a linear scan).
     * Duplicate words in a comparison sentence are still counted once per occurrence,
     * as before.
     *
     * @param fromWordList   words of the sentence being checked
     * @param toSentenceList sentences of the comparison paper, each as a word list
     * @return maximum per-sentence count of shared words; 0 when there are no sentences
     */
    public static int getMaxSentenceWordNum(List<String> fromWordList, List<List<String>> toSentenceList) {
        Set<String> fromWords = new HashSet<>(fromWordList);
        int num = 0;
        for (List<String> toWordList : toSentenceList) {
            int sum = 0;
            for (String toWord : toWordList) {
                if (fromWords.contains(toWord)) {
                    sum++;
                }
            }
            num = Math.max(sum, num);
        }
        return num;
    }

    // Matches a 4-digit year (1900-2099) followed by ',', '.' or a space — the typical
    // tail of an inline citation such as "Liu et al., 2015.". Compiled once instead of
    // on every call.
    private static final Pattern REFERENCE_YEAR_PATTERN =
            Pattern.compile("(19[0-9]{2},|19[0-9]{2}\\.|19[0-9]{2} |20[0-9]{2},|20[0-9]{2}\\.|20[0-9]{2} )");

    /**
     * Detects whether a sentence looks like a bibliography/reference entry so it can
     * be excluded from the similarity comparison (some PDFs split badly and leak
     * references into the body text; they must not appear in the report).
     *
     * <p>A sentence counts as a reference when it contains a citation-style year,
     * "et al.", or any of the common DOI spellings.
     *
     * @param s sentence to test
     * @return true when the sentence should be treated as a reference and skipped
     */
    public static boolean checkReferenceExclude(String s) {
        String lower = s.toLowerCase();
        return REFERENCE_YEAR_PATTERN.matcher(s).find()
                || lower.contains("et al.")
                || lower.contains("dx.doi.org/10.")
                || lower.contains("doi10.")
                || lower.contains("doi 10.")
                || lower.contains("doi:10.");
    }

    /**
     * Retries custom-text comparison orders asynchronously.
     *
     * <p>When {@code fource == 1} the record with the given id is retried
     * unconditionally; otherwise only a record that is still pending
     * (TxtAnaFlag = 0, SoType = 3) is processed. Both paths previously duplicated
     * the identical retry loop, which is now shared in {@link #retryWosBasics}.
     *
     * <p>Contact: 葛成瑞 18678297353
     *
     * @param id     WosBasic record id to (re)process
     * @param fource 1 to force a retry regardless of current state; anything else
     *               processes only pending records
     */
    @Async
    public void manualData(Integer id, Integer fource) {
        log.info("处理自定义对比没处理的订单id++" + id);
        List<WosBasic> records;
        if (fource != null && fource == 1) {
            records = new LambdaQueryChainWrapper<>(wosBasicMapper)
                    .eq(WosBasic::getId, id)
                    .list();
            log.info("强制重试任务++id" + id);
        } else {
            records = new LambdaQueryChainWrapper<>(wosBasicMapper)
                    .eq(WosBasic::getTxtAnaFlag, 0)
                    .eq(WosBasic::getSoType, 3)
                    .eq(WosBasic::getId, id)
                    .list();
        }
        retryWosBasics(records);
    }

    // Shared retry loop: flag each record as in-progress (-1), run the comparison,
    // then persist again.
    // NOTE(review): the entity is saved a second time after dealManualWordCheck —
    // presumably the check mutates it; confirm before removing either update.
    private void retryWosBasics(List<WosBasic> records) {
        for (WosBasic wosBasic : records) {
            log.info("处理任务++wosBasic+" + wosBasic.getId());
            wosBasic.setTxtAnaFlag(-1);
            wosBasicMapper.updateById(wosBasic);
            dealManualWordCheck(wosBasic);
            wosBasicMapper.updateById(wosBasic);
        }
    }

    /**
     * 2024年3月29日13:30:48 新版本模板测试输出
     *
     * @param uploadPath
     * @param outRes
     * @param paperAna
     * @return
     */
    @SneakyThrows
    public String generateToWord2(String uploadPath, OutEntity outRes, PaperAna paperAna, WosBasic wosBasic) {
        // 进行转化小数点
        Integer ayear = outRes.getAyear();
        //2023年9月12日10:59:30
        /*
         * 采用模板生成数据
         * */
        try {
            extractUtil.getLicense();
        } catch (Exception e) {

        }
        String resNote = "";
        if (wosBasic != null) {
            Integer bidId = wosBasic.getBidId();
            if (bidId != null) {
                BidDocumentRecordArticle bidDocumentRecordArticle = bidDocumentRecordArticleMapper.selectById(bidId);
                if (StringUtil.isNotNull(bidDocumentRecordArticle)) {
                    resNote = bidDocumentRecordArticle.getResNote();
                }
            }
        }

        Calendar calendar = Calendar.getInstance();
        String date = "" + calendar.get(Calendar.YEAR) + (calendar.get(Calendar.MONTH) + 1) + calendar.get(Calendar.DATE);
        String fileName = date + System.currentTimeMillis() + ".docx";
//        String fileNamePdf = fileName.replace(".docx", "") + ".pdf";
        String author = "";
        String id = "";

        String selfextra = "";
        int highSentensSize = 0;
        int middleSentensSize = 0;
        int lowSentensSize = 0;
        if (StringUtil.isNotNull(outRes.getSelfextra())) {
            selfextra = outRes.getSelfextra();
        } else {
            selfextra = "否";
        }
        if (StringUtil.isNotNull(paperAna.getId())) {
            id = paperAna.getId() + "";
        }
        id = String.valueOf(Integer.valueOf(id) + 16351520);
        if (StringUtil.isNotNull(paperAna.getAuthor())) {
            author = paperAna.getAuthor() + "";
        }

        List<SentenceDiskLevel> sentenceDiskLevel = outRes.getSentenceDiskLevel();
        //获取自己的作者名 9 种变体
        String format = DateUtil.format(new Date(), "yyyy-MM-dd");
        String format2 = DateUtil.format(new Date(), "yyyy-MM-dd HH:mm");
        String checktime = format2;
        String titleAuthor = outRes.getTitleAuthor();
        String fromPath = uploadPath + "template/articleRes3.docx";
        if (StrUtils.isRuiJin()) {
            fromPath = uploadPath + "template/articleRes2ruijin.docx";
        }
        String title1 = paperAna.getTitle();
        // ___
        if (title1.contains("___")) {
            title1 = title1.replace("___", "_");
        }


        Document doc = new Document(fromPath);

        if (StringUtil.isNotNull(resNote)) {
            doc.getRange().replace("{{resNOTE}}", resNote, false, false);
        } else {
            doc.getRange().replace("{{resNOTE}}", "无", false, false);

        }

        doc.getRange().replace("{{title}}", title1, false, false);
        //  下面try 防止zfl 过来后是null 导致替换失败
        try {
            doc.getRange().replace("{{endtime}}", format, false, false);
        } catch (Exception e) {
            doc.getRange().replace("{{endtime}}", "", false, false);

        }

        try {
            doc.getRange().replace("{{titleauthor}}", titleAuthor, false, false);
        } catch (Exception e) {
            doc.getRange().replace("{{titleauthor}}", "", false, false);

        }
        try {
            doc.getRange().replace("{{selfextra}}", selfextra, false, false);
        } catch (Exception e) {
            doc.getRange().replace("{{selfextra}}", "", false, false);

        }
        try {
            doc.getRange().replace("{{md5}}", outRes.getFileMd5(), false, false);
        } catch (Exception e) {
            doc.getRange().replace("{{md5}}", "", false, false);

        }
        try {
            doc.getRange().replace("{{author}}", author, false, false);

        } catch (Exception e) {
            doc.getRange().replace("{{author}}", "", false, false);

        }
        try {
            doc.getRange().replace("{{id}}", id, false, false);


        } catch (Exception e) {
            doc.getRange().replace("{{id}}", "", false, false);

        }
        try {
            doc.getRange().replace("{{checktime}}", checktime, false, false);
        } catch (Exception e) {
            doc.getRange().replace("{{checktime}}", "", false, false);

        }
        // 添加单篇相似度
        List<OutEntity.BestArticle> bestArticle = outRes.getBestArticles();
        if (bestArticle != null && bestArticle.size() > 0) {
            OutEntity.BestArticle bestArticle1 = bestArticle.get(0);
            doc.getRange().replace("{{sinsimilar}}", bestArticle1.getSimilarSize(), false, false);
        } else {
            doc.getRange().replace("{{sinsimilar}}", "0.0%", false, false);
        }


//        Table table = (Table) doc.getChild(NodeType.TABLE, 0, true);
        List<String> high = new ArrayList<>();
        List<String> middle = new ArrayList<>();
        List<String> low = new ArrayList<>();

        String breakLine = ControlChar.LINE_BREAK;
        int breakSize = 15;

//        Integer sentenceSize = 0;
//        double sentenceSizeAll = 0.0;
        List<SentenceEntity> highriskparam = new ArrayList<>();
        List<SentenceEntity> middleghrisk = new ArrayList<>();
        List<SentenceEntity> Lowghrisk = new ArrayList<>();
        List<SentenceEntity> highriskAll = new ArrayList<>();
        for (SentenceDiskLevel searchEntity : sentenceDiskLevel) {
//            log.info("风险等级+"+searchEntity.getLevelName());
            List<SentenceEntity> ls = searchEntity.getLs();
            for (int i = 0; i < ls.size(); i++) {
                SentenceEntity l = ls.get(i);
                l.setMaxDoubleMatchD(Double.valueOf(l.getMaxDoubleMatch().replace("%", "")));
                String str = StrUtils.DelOtherLower(l.getStr());
                //这里也排除下
                BackMatchEntity backMatchEntity = l.getBackMatchEntity();
                if (StringUtil.isNotNull(ayear) && StringUtil.isNotNull(backMatchEntity) && StringUtil.isNotNull(backMatchEntity.getYear()) && Integer.parseInt(backMatchEntity.getYear()) >= ayear.intValue()) {
                    log.info("有文章发表的年且疑似句子发表年大于等于文章年 略过");
                    continue;
                }
                if (searchEntity.getLevelName().equals(LevelName.HIGHLEVEL)) {
                    high.add(str);
                    highriskparam.add(l);
//                    FileUtil.appendUtf8String("str+" + str, "/usr/local/gcr/20240330/shunxumlsindex.log");
                }
                if (searchEntity.getLevelName().equals(LevelName.MIDLEVEL)) {
                    middle.add(str);
                    middleghrisk.add(l);
//                    FileUtil.appendUtf8String("str+" + str, "/usr/local/gcr/20240330/shunxumlsindex.log");
                }
                if (searchEntity.getLevelName().equals(LevelName.LOWLEVEL)) {
                    low.add(str);
                    Lowghrisk.add(l);
//                    FileUtil.appendUtf8String("str+" + str, "/usr/local/gcr/20240330/shunxumlsindex.log");
                }

//                FileUtil.appendUtf8String("str+" + str, "/usr/local/gcr/20240330/shunxumls.log");
//                if (i >= breakSize) {
//                    //每个维度默认给十条
//                    break;
//                }


            }

        }

//        highrisk = highrisk.stream().sorted(Comparator.comparing(SentenceEntity::getMaxDoubleMatchD).reversed()).collect(Collectors.toList());
        List<String> tableInfoListHigh = new ArrayList<>();
        List<String> tableInfoListMiddle = new ArrayList<>();
        List<String> tableInfoListLow = new ArrayList<>();
        List<String> tableInfoList = new ArrayList<>();
        // 最下面相似文献全部列表展示


        if (highriskparam.size() >= 15) {
            highriskparam = highriskparam.subList(0, 15);
        }

        if (middleghrisk.size() >= 15) {
            middleghrisk = middleghrisk.subList(0, 15);
        }

        if (Lowghrisk.size() >= 15) {
            Lowghrisk = Lowghrisk.subList(0, 15);
        }
        highSentensSize = highriskparam.size();
        middleSentensSize = middleghrisk.size();
        lowSentensSize = Lowghrisk.size();
//        highriskAll.addAll(highriskparam);
//        highriskAll.addAll(middleghrisk);
//        highriskAll.addAll(Lowghrisk);
        //按照顺序来
        //按照顺序来
        // 2024年4月28日20:27:10 按照倒叙来
        if (StrUtils.isRuiJin()) {
            // 瑞金定制
            int s = 1;
            highriskAll.addAll(highriskparam);
            highriskAll.addAll(middleghrisk);
            highriskAll.addAll(Lowghrisk);
            List<SentenceEntity> collect = highriskAll.stream().sorted(Comparator.comparing(SentenceEntity::getMaxDoubleMatchD).reversed()).collect(Collectors.toList());
            for (SentenceEntity sentenceEntity : collect) {
                BackMatchEntity backMatchEntity = sentenceEntity.getBackMatchEntity();
                String str = StrUtils.DelOtherLower(sentenceEntity.getStr());
                if (backMatchEntity != null) {
                    StringBuffer stringBufferFrom = new StringBuffer();
                    String strMatchRed = sentenceEntity.getMatchStr();
                    String sameDouboe = sentenceEntity.getMaxDoubleMatchD() + "%";
                    String title = StrUtils.DelOtherLower(backMatchEntity.getTitle() == null ? "" : backMatchEntity.getTitle());
                    if (!title.endsWith(".")) {
                        title = title + ".";
                    }
                    String doi = StrUtils.DelOtherLower(backMatchEntity.getDoi() == null ? "" : backMatchEntity.getDoi());
                    String year = StrUtils.DelOtherLower(backMatchEntity.getYear() == null ? "" : backMatchEntity.getYear());
                    stringBufferFrom.append("标题:" + title + breakLine);
                    stringBufferFrom.append("文章句子:" + strMatchRed + breakLine);
                    stringBufferFrom.append("相似度:" + sameDouboe + breakLine);
                    stringBufferFrom.append("doi:" + doi + breakLine);
                    stringBufferFrom.append("出版年:" + year + breakLine);
                    String s1 = stringBufferFrom.toString();
                    // 2024年3月20日11:26:51 因为部分特殊字符导致的表格位置错乱打算采用直接拼接字符串 直接写入 老板是这个意思
                    StringBuffer stringBuffer = new StringBuffer();
                    stringBuffer.append("序号: " + s + breakLine);
                    stringBuffer.append("检测原文: " + str + breakLine);
                    stringBuffer.append("相似文献及来源: " + breakLine + s1 + breakLine);
                    tableInfoList.add(stringBuffer.toString());
                    s++;
                }
            }
            if (tableInfoList.size() > 0) {
                doc.getRange().replace("{{tableinfogcr}}", String.join("", tableInfoList), false, false);
            } else {
                doc.getRange().replace("{{tableinfogcr}}", "暂无内容", false, false);
            }
        }

        // 2024年4月28日20:27:10 按照倒叙来
        List<SentenceEntity> collectH = highriskparam.stream().sorted(Comparator.comparing(SentenceEntity::getMaxDoubleMatchD).reversed()).collect(Collectors.toList());
        List<SentenceEntity> collectM = middleghrisk.stream().sorted(Comparator.comparing(SentenceEntity::getMaxDoubleMatchD).reversed()).collect(Collectors.toList());
        List<SentenceEntity> collectL = Lowghrisk.stream().sorted(Comparator.comparing(SentenceEntity::getMaxDoubleMatchD).reversed()).collect(Collectors.toList());

        List<List<SentenceEntity>> allc = new ArrayList<>();
        allc.add(collectH);
        allc.add(collectM);
        allc.add(collectL);
        for (int i = 0; i < allc.size(); i++) {
            List<SentenceEntity> sentenceEntities = allc.get(i);
            int s = 1;
            for (SentenceEntity sentenceEntity : sentenceEntities) {
                BackMatchEntity backMatchEntity = sentenceEntity.getBackMatchEntity();
                String str = StrUtils.DelOtherLower(sentenceEntity.getStr());
                if (backMatchEntity != null) {
                    StringBuffer stringBufferFrom = new StringBuffer();
                    String strMatchRed = sentenceEntity.getMatchStr();
                    // 这里计算 前面不管了 太费劲了 排查错误 2024年3月27日19:39:00
//                Double aDouble = StrUtils.sentenceParseWordNewList(Arrays.asList(getLemma(str).split(" ")), Arrays.asList(getLemma(strMatchRed).split(" ")));
//                double score2 = sentenceEntity.getMaxDoubleMatchD();
//                double score2 = aDouble;
                    String sameDouboe = sentenceEntity.getMaxDoubleMatchD() + "%";
                    String title = StrUtils.DelOtherLower(backMatchEntity.getTitle() == null ? "" : backMatchEntity.getTitle());
                    if (!title.endsWith(".")) {
                        title = title + ".";
                    }
//                String au = StrUtils.DelOtherLower(backMatchEntity.getAu() == null ? "" : backMatchEntity.getAu());
                    String doi = StrUtils.DelOtherLower(backMatchEntity.getDoi() == null ? "" : backMatchEntity.getDoi());
                    String year = StrUtils.DelOtherLower(backMatchEntity.getYear() == null ? "" : backMatchEntity.getYear());
//                    String key = backMatchEntity.getKey() == null ? "" : backMatchEntity.getKey();
                    stringBufferFrom.append("相似句子:" + strMatchRed + breakLine);
                    stringBufferFrom.append("文献标题:" + title + breakLine);
//                    stringBufferFrom.append("相似度:" + sameDouboe + breakLine);
//                stringBufferFrom.append("作者:" + au + breakLine);
//                    stringBufferFrom.append("doi:" + doi + breakLine);
                    stringBufferFrom.append("出版年:" + year + breakLine);
//                    stringBufferFrom.append("PMID:" + key + breakLine);
                    String s1 = stringBufferFrom.toString();
//                Node deepClone = table.getLastRow().deepClone(true);
//                Range range = table.getLastRow().getRange();
                    // 2024年3月20日11:26:51 因为部分特殊字符导致的表格位置错乱打算采用直接拼接字符串 直接写入 老板是这个意思
                    StringBuffer stringBuffer = new StringBuffer();
                    stringBuffer.append("结果" + s + ": " + "句子相似度:" + sameDouboe + breakLine);
                    stringBuffer.append("检测句子: " + str + breakLine);
                    stringBuffer.append("相似句子及来源" + breakLine + s1 + breakLine);
                    if (i == 0) {
                        tableInfoListHigh.add(stringBuffer.toString());
                        s++;
                    }
                    if (i == 1) {
                        tableInfoListMiddle.add(stringBuffer.toString());
                        s++;
                    }
                    if (i == 2) {
                        tableInfoListLow.add(stringBuffer.toString());
                        s++;
                    }
                }
            }
        }


        if (tableInfoListHigh.size() > 0) {
            doc.getRange().replace("{{tableinfogcrhigh}}", String.join("", tableInfoListHigh), false, false);
        } else {
            doc.getRange().replace("{{tabletableinfogcrhighinfogcr}}", "暂无内容", false, false);
        }
        if (tableInfoListMiddle.size() > 0) {
            doc.getRange().replace("{{tableinfogcrmiddle}}", String.join("", tableInfoListMiddle), false, false);
        } else {
            doc.getRange().replace("{{tableinfogcrmiddle}}", "暂无内容", false, false);
        }
        if (tableInfoListLow.size() > 0) {
            doc.getRange().replace("{{tableinfogcrlow}}", String.join("", tableInfoListLow), false, false);
        } else {
            doc.getRange().replace("{{tableinfogcrlow}}", "暂无内容", false, false);
        }

        if (highSentensSize > 0) {
            doc.getRange().replace("{{highsize}}", highSentensSize + "", false, false);
        } else {
            doc.getRange().replace("{{highsize}}", "0", false, false);
        }

        if (middleSentensSize > 0) {
            doc.getRange().replace("{{middlesize}}", middleSentensSize + "", false, false);
        } else {
            doc.getRange().replace("{{middlesize}}", "0", false, false);
        }

        if (lowSentensSize > 0) {
            doc.getRange().replace("{{lowsize}}", lowSentensSize + "", false, false);
        } else {
            doc.getRange().replace("{{lowsize}}", "0", false, false);
        }

        StringBuffer hgsb = new StringBuffer();
        StringBuffer misb = new StringBuffer();
        StringBuffer losb = new StringBuffer();
        if (high.size() > breakSize) {
            high = high.subList(0, breakSize);
        }
        if (middle.size() > breakSize) {
            middle = middle.subList(0, breakSize);
        }
        if (low.size() > breakSize) {
            low = low.subList(0, breakSize);
        }
        for (int i = 0; i < high.size(); i++) {
            String s1 = high.get(i);
            hgsb.append((i + 1) + ": " + s1 + breakLine);
        }

        for (int i = 0; i < middle.size(); i++) {
            String s1 = middle.get(i);
            misb.append((i + 1) + ": " + s1 + breakLine);
        }

        for (int i = 0; i < low.size(); i++) {
            String s1 = low.get(i);
            losb.append((i + 1) + ": " + s1 + breakLine);
        }

        try {
            if (hgsb.toString().length() > 0) {
                doc.getRange().replace("{{high}}", hgsb.toString(), false, false);
            } else {
                doc.getRange().replace("{{high}}", "暂无内容", false, false);
            }
        } catch (Exception e) {
            log.info("hgsb替换错误?");
        }
        try {
            if (misb.toString().length() > 0) {
                doc.getRange().replace("{{middle}}", misb.toString(), false, false);
            } else {
                doc.getRange().replace("{{middle}}", "暂无内容", false, false);
            }
        } catch (Exception e) {
            log.info("misb替换错误?");

        }
        try {
            if (losb.toString().length() > 0) {

                doc.getRange().replace("{{low}}", losb.toString(), false, false);
            } else {

                doc.getRange().replace("{{low}}", "暂无内容", false, false);
            }
        } catch (Exception e) {
            log.info("losb替换错误?");
        }
        String isflag = "没有";
        if (high.size() > 0) {
            isflag = "有";
        }

        doc.getRange().replace("{{isflag}}", isflag, false, false);
        doc.getRange().replace("{{allwords}}", outRes.getAllWordSize() + "", false, false);
        doc.getRange().replace("{{sameword}}", outRes.getAllSameSize() + "", false, false);

//        Double allDoubleArticle = PublicUtils.takeTwoDigits(sentenceSizeAll / sentenceSize.doubleValue());
//        String similar = allDoubleArticle + "%";
        String similar = PublicUtils.takeTwoDigits(outRes.getSameDouble()) + "%";
        //2024年3月27日11:28:21 这里计算错误 需要按照每个句子相似度除以总全部句子单子数字 而不是每个风险句子 得到结果是错误的
        // 最相关的文章集合取前五
        if (bestArticle != null) {
            StringBuffer stringBuffer = new StringBuffer();
            int sin = 1;
            String pre = "";
            for (OutEntity.BestArticle article : bestArticle) {
                String key = article.getKey();
//                log.info("最匹配的文章id++" + key);
                String similarSize = article.getSimilarSize();
                String doi = article.getDoi() == null ? "" : article.getDoi();
                String year = article.getYear() == null ? "" : article.getYear();
                String rres = "";
                if (StringUtil.isNotNull(similarSize)) {
                    rres = "序号: " + sin + breakLine + "标题: " + article.getTitle() + breakLine + "doi: " + doi + breakLine + "出版年: " + year + breakLine + "相似度: " + similarSize + breakLine + "";
                }
                sin++;
                stringBuffer.append(rres + breakLine);
            }
            doc.getRange().replace("{{similarinfos}}", stringBuffer.toString(), false, false);
        } else {
            doc.getRange().replace("{{similarinfos}}", "", false, false);
        }


        // 添加全部相似文献信息 就是把高中低风险的句子加起来进行展示 2024年3月22日15:10:41


        doc.getRange().replace("{{similar}}", similar, false, false);
        String ToPath = uploadPath + "template/" + date + "/";
//        String ToPath = uploadPath + "template/";
        try {
            File targetFile = new File(ToPath);
            if (!targetFile.exists()) {
                //如果文件夹不存在就创建
                targetFile.mkdirs();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }

        try {
            doc.save(ToPath + fileName, SaveFormat.DOCX);
            log.info("存储的word 路径新的+" + ToPath + fileName);
//            doc.save(ToPath + fileNamePdf, SaveFormat.PDF);
        } catch (Exception e) {
            e.printStackTrace();

        }
        paperAna.setResultWord(ToPath + fileName);
//        paperAna.setResultPdf(ToPath + fileNamePdf);
        log.info("生成word 结束 返回上一级");
        return ToPath + fileName;
    }

    /**
     * Runs the manual text comparison for one uploaded record: marks matched
     * paragraphs, runs the similarity analysis (whole-network or custom library
     * depending on {@code selfTxt}) and generates the result Word report.
     * <p>
     * Result fields (result word path, similarity, analysis flag) are set on
     * {@code wosBasic} in place; persisting the entity is left to the caller.
     *
     * @param wosBasic the record under analysis
     */
    @SneakyThrows
    public void dealManualWordCheck(WosBasic wosBasic) {
        // 2023-10-10: two goals — highlight matched paragraphs and generate the Word report.
        Integer reID = wosBasic.getReFiD();
        String simi = wosBasic.getSimi();
        log.info("计算md5 ");
        String filePath = uploadPath + wosBasic.getFilePath();
        String fileMD5 = "";
        if (FileUtil.exist(filePath) && !FileUtil.isDirectory(filePath)) {
            // try-with-resources: the original leaked this stream.
            try (FileInputStream in = new FileInputStream(filePath)) {
                fileMD5 = DigestUtils.md5Hex(in);
            }
        }
        // Similarity threshold: the caller supplies a percentage; fall back to 30% on any parse problem.
        Double simiReal = 0.3;
        if (simi != null) {
            try {
                simiReal = Double.valueOf(simi) / 100;
            } catch (Exception e) {
                simiReal = 0.3;
            }
        }
        log.info("自定对对比进行文本赋值 simiReal++" + simiReal);
        // splitAs distinguishes whole-network comparison from the custom library.
        Integer splitAs = wosBasic.getSplitAs();
        if (StringUtil.isNotNull(reID)) {
            List<String> contextList = new ArrayList<>();
            FileRecord byId = fileRecordsService.getById(reID);
            List<FileDetailsTxt> list = fileDetailsTxtService.list(Wrappers.<FileDetailsTxt>lambdaQuery().eq(FileDetailsTxt::getFId, reID).eq(FileDetailsTxt::getTpye, 1));
            long s1 = System.currentTimeMillis();
            for (FileDetailsTxt fileDetails : list) {
                String txtContent = fileDetails.getTxtContent();
                contextList.add(txtContent);
                String matchContent = fileDetails.getMatchContent();
                if (StringUtil.isNotNull(txtContent) && StringUtil.isNull(matchContent)) {
                    // 2024-05-07: highlighting disabled — stored rows lack pmid/ut, so a document
                    // could match itself. b stays false until that is fixed (see commented calls).
                    boolean b = false;
//                    if (splitAs != null && splitAs == 0) {
//                        b = fileDetailsTxtService.matchEsRes(txtContent, simiReal);
//                    } else {
//                        b = fileDetailsTxtService.matchEsResManal(txtContent, byId.getId(), byId.getUploadId(), simiReal);
//                    }
                    if (b) {
                        String newtxtContent = "<em>" + txtContent + "</em>";
                        fileDetails.setMatchContent(newtxtContent);
                        fileDetailsTxtService.updateById(fileDetails);
                    } else {
                        fileDetails.setMatchContent("<em></em>");
                        fileDetailsTxtService.updateById(fileDetails);
                    }
                }
            }
            SmallTool.printMessLongTimeTake("es 检索左侧文字标红耗时++", s1);
            long s2 = System.currentTimeMillis();
            // Collect the document's own ut/pm identifiers so matching can exclude it.
            String cUt = wosBasic.getCUt();
            String cPm = wosBasic.getCPm();
            String pmorut = "";
            if (StringUtil.isNotNull(cUt)) {
                pmorut = cUt;
            }
            if (StringUtil.isNotNull(cPm)) {
                pmorut = pmorut + "," + cPm;
            }
            OutEntity outEntity = null;
            if (StringUtil.isNull(wosBasic.getSelfTxt()) || wosBasic.getSelfTxt() == 0) {
                log.info("dealManualWordCheck 走全网文字库");
                outEntity = dealDataVersion3(wosBasic.getCPy(), wosBasic.getFilePath(), contextList, byId.getId(), byId.getUploadId(), splitAs, pmorut);
            } else {
                log.info("dealManualWordCheck 走自定义文字库");
                outEntity = dealDataVersion3Self(wosBasic.getTxtComIds(), wosBasic.getCPy(), wosBasic.getFilePath(), contextList, byId.getUploadId(), 1, pmorut);
            }
            SmallTool.printMessLongTimeTake("es 检索结束 ", s2);
            // esava == 2 marks an abnormal ES run; the report is still generated either
            // way, only the stored analysis flag differs (-2 abnormal, 1 normal).
            int txtAnaFlag = outEntity.getEsava().intValue() == 2 ? -2 : 1;
            finishManualCheck(wosBasic, outEntity, fileMD5, txtAnaFlag);
        }
    }

    /**
     * Shared tail of {@link #dealManualWordCheck}: generates the Word report,
     * propagates the report path to the linked bid record (if any) and stores the
     * similarity plus the given analysis flag on {@code wosBasic}.
     * (Extracted from two byte-identical branches that differed only in the flag.)
     *
     * @param wosBasic   record being updated in place
     * @param outEntity  analysis result used to build the report
     * @param fileMD5    md5 of the analysed file (may be empty)
     * @param txtAnaFlag value stored as the analysis flag (-2 abnormal, 1 normal)
     */
    @SneakyThrows
    private void finishManualCheck(WosBasic wosBasic, OutEntity outEntity, String fileMD5, int txtAnaFlag) {
        PaperAna paperAna = new PaperAna();
        paperAna.setTitle(wosBasic.getCTi());
        paperAna.setId(wosBasic.getId());
        paperAna.setUptime(StringToDate(com.yuntsg.ruiijn.paperana.utils.DateUtil.LocalDateToStringyyyymm(wosBasic.getSysCreateTime())));
        paperAna.setAuthor(wosBasic.getAuthor());
        log.info("自定对对比进行生成word");
        outEntity.setFileMd5(fileMD5);
        generateToWord2(uploadPath, outEntity, paperAna, wosBasic);
        String resultWord = paperAna.getResultWord();
        log.info("word 路径+" + resultWord);
        // Stored paths are relative to the template directory.
        resultWord = resultWord.replace("/home/analysis/uploadFile/template/", "");
        wosBasic.setResultWord(resultWord);
        Integer bidId = wosBasic.getBidId();
        log.info("bidId" + bidId);
        if (bidId != null) {
            // Push the report path back onto the linked bid-document record.
            BidDocumentRecordArticle bidDocumentRecordArticle = bidDocumentRecordArticleMapper.selectById(bidId);
            if (bidDocumentRecordArticle != null) {
                bidDocumentRecordArticle.setResWord(upLoadService.OtherPathToStandPath(wosBasic.getResultWord(), "real"));
                bidDocumentRecordArticle.setUpti(LocalDateTime.now());
                bidDocumentRecordArticleMapper.updateById(bidDocumentRecordArticle);
            }
        }
        log.info("wosBasic++id" + wosBasic.getId());
        Double sameDouble = outEntity.getSameDouble();
        if (sameDouble == null) {
            sameDouble = 0.0;
            outEntity.setSameDouble(0.0);
        }
        double v = PublicUtils.takeTwoDigits(sameDouble);
        log.info("文章相似度+" + v);
        wosBasic.setTxtSame(v);
        wosBasic.setTxtAnaFlag(txtAnaFlag);
    }

    // Compare submitted text against the custom (self-built) library.
    /**
     * Asynchronously splits the submitted text {@code s} into sentences (by CRLF,
     * then LF, otherwise treated as one sentence), runs the similarity comparison
     * and stores the outcome on the PaperAna row matched by {@code rekey == key}.
     * <p>
     * NOTE(review): both comparison calls below are commented out, so
     * {@code outEntity} is always {@code null} here and only the final
     * else-branch (mark the record failed, resFlag = 1, looked up by uid + key)
     * can actually run — the word-generation path is currently dead code.
     * Confirm whether this method is still meant to be invoked.
     *
     * @param colids    ids of the custom text collections to compare against
     * @param artYear   publication year of the article
     * @param orgPdfpah path of the original PDF (may be empty)
     * @param s         the raw submitted text
     * @param key       rekey identifying the PaperAna record to update
     * @param uid       submitting user's id
     * @param pmorut    pm/ut numbers to exclude from matching
     * @param selfTxt   null/0 = whole-network comparison, otherwise custom library
     */
    @SneakyThrows
    @Async
    // runs asynchronously
    public void caculateStr(String colids, String artYear, String orgPdfpah, String s, String key, Integer uid, String pmorut, Integer selfTxt) {
        List<String> strs = new ArrayList<>();
        if (s.contains("\r\n")) {
            String[] split = s.split("\r\n");
            for (String s1 : split) {
                strs.add(s1);
            }
        } else if (s.contains("\n")) {
            String[] split = s.split("\n");
            for (String s1 : split) {
                strs.add(s1);
            }
        } else {
            strs.add(s);
        }
        log.info("断落转句子数量+" + strs.size());

        // Sample sentences kept for manual testing:

        /**
         *         Emerging evidence suggests that post concussive symptoms, including mood changes, may be improved through morning blue-wavelength light therapy
         *         The sDCM analysis showed dominant connectivity pattern following mTBI (pre-treatment) within the hemisphere contralateral to the one observed for HCs
         *         We investigated the extent to which cognitive dysfunction is shaped by genetic or environmental influences, and whether these factors differ in women and men
         *         The findings emphasize the extent to which research is needed to uncover nonfamilial environmental influences on cognitive dysfunction in later life
         *         Carotid atherosclerosis is an important cause of stroke. Intra-plaque haemorrhage (IPH) on magnetic resonance imaging (MRI) increases stroke risk
         *
         *
         *
         */
        // Whole-network comparison by default — both branches below are currently disabled.
        OutEntity outEntity = null;
        if (selfTxt == null || selfTxt == 0) {
            log.info("caculateStr 全网文字 不该出现 否则错误");
//            outEntity = dealDataVersion3(artYear, orgPdfpah, strs, "", null, null, 0, pmorut);
        } else {
            log.info("caculateStr 自定义文字 不该出现 否则错误");
//            outEntity = dealDataVersion3Self(colids, artYear, orgPdfpah, strs, "", null, null, 1, pmorut);
        }

        if (outEntity != null) {
            // esava == 2 marks an abnormal ES run: record the raw result and flag failure.
            if (outEntity.getEsava().intValue() == 2) {
                PaperAna paperAna = paperAnaMapper.selectOne(new QueryWrapper<PaperAna>().eq("rekey", key));
                if (paperAna != null) {
                    log.info("生成word结果异常");
                    paperAna.setResFlag(1);
                    paperAna.setResult(JSONObject.toJSONString(outEntity));
                    log.info("失败+++标题" + paperAna.getTitle());
                    paperAnaMapper.updateById(paperAna);
                } else {
                    log.info("PaperAna 未查询到 请留意");
                }
            } else {
                // Normal result: persist the JSON and try to generate the Word report.
                PaperAna paperAna = paperAnaMapper.selectOne(new QueryWrapper<PaperAna>().eq("rekey", key));
                if (paperAna != null) {
                    paperAna.setResult(JSONObject.toJSONString(outEntity));
                    try {
                        generateToWord2(uploadPath, outEntity, paperAna, null);
                        paperAna.setResFlag(0);
                    } catch (Exception e) {
                        e.printStackTrace();
                        log.info("生成word结果异常");
                        paperAna.setResFlag(1);
                    }
                    log.info("成功+++标题" + paperAna.getTitle());
                    paperAnaMapper.updateById(paperAna);
                } else {
                    log.info("PaperAna 未查询到 请留意");
                }
            }
        } else {
            // Always reached while the comparison calls above stay commented out.
            log.info("实体类 OutEntitynull ");
            PaperAna paperAna = paperAnaMapper.selectOne(new QueryWrapper<PaperAna>().eq("uid", uid).eq("rekey", key));
            if (paperAna != null) {
                paperAna.setResFlag(1);
                paperAnaMapper.updateById(paperAna);
            }

        }
//        redisUtil.saveObject(key, outEntity);
    }


//    public static BackMatchEntity findTableInfo(Integer tableId, Integer fileId, String matchStr, String isManual) {
//        // 进行匹配原始标题进行查表
//
//
//
//        Db use = Db.use(DSFactory.get("paper_article"));
//        boolean is = Boolean.valueOf(isManual);
//
//        if (!is) {
//            if (tableId > 0) {
//                String table = "ncbi_pdf_to_txt_101".replace("101", tableId + "");
//                Entity entity = DbSearchUtil.backEntity(use, table, fileId);
//                if (entity != null) {
//                    Integer file_id = entity.getInt("file_id");
//                    if (StringUtil.isNotNull(file_id)) {
//                        String URL = "https://pm.yuntsg.com/utils/getpuinfo?type=1&q=123";
//                        String s = HttpUtil.get(URL.replace("123", file_id + ""));
//                        if (StringUtil.isNotNull(s)) {
//                            JSONObject jsonObject = JSONObject.parseObject(s);
//                            JSONObject data = jsonObject.getJSONObject("data");
//                            String code = jsonObject.getString("code");
//                            if (code.equals("0")) {
//                                PmUtEntity pmUtEntity = data.toJavaObject(PmUtEntity.class);
//                                BackMatchEntity ba = new BackMatchEntity();
//                                ba.setType("pm");
//                                ba.setTitle(pmUtEntity.getTitle());
//                                ba.setAu(pmUtEntity.getAu());
//                                ba.setJour(pmUtEntity.getJour());
//                                ba.setMatchStr(matchStr);
//                                ba.setKey(file_id + "");
//                                ba.setDoi(pmUtEntity.getDoi());
//                                return ba;
//                            }
//                        }
//                    }
//                } else {
//                    return null;
//                }
//            }
//        }
//
//
//        return null;
//    }


    // 最大分数计算需要计算类似

//    //系数风险
//    public static double catDouble(List<String> from, List<String> compare) {
//
//
//    }

    // 2023年9月12日09:46:39 生成word版本存在表里面

//    /**
//     * 判断字符
//     *
//     * @param sentenceDiskLevel 赋值对象
//     * @param org               原始文章段落
//     * @param score             得分
//     * @param esid              es 检索的id
//     * @param maxSameTemp       相同的数量
//     */
//    public void checkRiskSet(SentenceDiskLevel sentenceDiskLevel,
//                             String org,
//                             String strcntest1lemmaOrg,
//                             double score,
//                             List<String> keyOrgSp,
//                             List<SentenceDiskLevel> res,
//                             String esid, Integer maxSameTemp,
//                             Map<String, EsServerEntity> tempMap,
//                             boolean isMaul,
//                             Map<String, String> pminfoSetRes) {
//
//
//        //批量进行赋值
//        boolean isDeal = false;
//        if (score >= 0.8 & !isDeal) {
//            isDeal = true;
//            sentenceDiskLevel.setLevelName(LevelName.HIGHLEVEL);
//
//        }
//        if (score >= 0.6 & !isDeal) {
//            isDeal = true;
//            sentenceDiskLevel.setLevelName(LevelName.MIDLEVEL);
//
//        }
//        if (score >= 0.4 & !isDeal) {
//            isDeal = true;
//            sentenceDiskLevel.setLevelName(LevelName.LOWLEVEL);
//        }
//        if (StringUtil.isNotNull(sentenceDiskLevel.getLevelName())) {
//            //进行组合数据
//            List<SentenceEntity> ls = sentenceDiskLevel.getLs();
//            SentenceEntity sentenceEntity = new SentenceEntity();
//            sentenceEntity.setStr(org);
//            sentenceEntity.setStrInt(keyOrgSp.size());
//            sentenceEntity.setMaxMatchInt(maxSameTemp);
//            double similarityRatio = Test.getScore2(org, strcntest1lemmaOrg);
////            Integer size = keyOrgSp.size() + machStrOrgSize;
////            Integer size = machStrOrgSize;
////            double tempCa = maxSameTemp.doubleValue() / size.doubleValue();
//            double tempCa = similarityRatio;
//            double v = tempCa;
//            sentenceEntity.setMaxDoubleMatch(v + "%");
//            EsServerEntity esServerEntity = tempMap.get(esid);
//            sentenceEntity.setMatchStr(esServerEntity.getStr());
//            sentenceEntity.setStrMatchRed(esServerEntity.getStrMatchRed());
//            sentenceEntity.setMatchStrInt(maxSameTemp);
//            List<BackMatchEntity> temp = new ArrayList<>();
//            tempMap.forEach((k, v1) -> {
//                //先把每句话放弃 太慢了 后续优化
//                Map<String, Object> param = new HashMap<>();
//                param.put("tableId", v1.getTable_id());
//                param.put("fileId", v1.getFile_id());
//                param.put("matchStr", v1.getStr());
//                param.put("isManual", String.valueOf(isMaul));
//                param.put("pmorut", Integer.parseInt(v1.getDatabase_type()));
//                String PMINFOKEY = v1.getTable_id() + "" + v1.getFile_id();
//                String post = "";
//                if (pminfoSetRes.containsKey(PMINFOKEY)) {
//                    post = pminfoSetRes.get(PMINFOKEY);
//                } else {
//                    String URL = escenterUrl + "web/pub/getpminfo";
//                    post = HttpUtil.post(URL, param);
//                    if (StringUtil.isNotNull(post)) {
//                        String newres = post;
//                        pminfoSetRes.put(PMINFOKEY, newres);
//                    }
//                }
//                Map parse = JSONObject.parseObject(post, Map.class);
//                BackMatchEntity tableInfo = null;
//                if (parse.get("code").toString().equals("0")) {
//                    tableInfo = JSONObject.parseObject(parse.get("data").toString(), BackMatchEntity.class);
//                }
//                if (tableInfo != null) {
//                    tableInfo.setMatchStr(v1.getStr());
//                    temp.add(tableInfo);
//                }
//            });
//
//            Map<String, Object> param = new HashMap<>();
//            param.put("tableId", esServerEntity.getTable_id());
//            param.put("fileId", esServerEntity.getFile_id());
//            param.put("matchStr", esServerEntity.getStr());
//            param.put("isManual", String.valueOf(isMaul));
//            param.put("pmorut", Integer.parseInt(esServerEntity.getDatabase_type()));
//            String PMINFOKEY = esServerEntity.getTable_id() + "" + esServerEntity.getFile_id();
//            String post = "";
//            if (pminfoSetRes.containsKey(PMINFOKEY)) {
//                post = pminfoSetRes.get(PMINFOKEY);
//            } else {
//                String URL = escenterUrl + "web/pub/getpminfo";
//                post = HttpUtil.post(URL, param);
//                if (StringUtil.isNotNull(post)) {
//                    String newres = post;
//                    pminfoSetRes.put(PMINFOKEY, newres);
//                }
//            }
//
//            Map parse = JSONObject.parseObject(post, Map.class);
//            BackMatchEntity tableInfo = null;
//            if (parse.get("code").toString().equals("0")) {
//                tableInfo = JSONObject.parseObject(parse.get("data").toString(), BackMatchEntity.class);
//            }
//            BackMatchEntity diskLevel = JSONObject.parseObject(JSONObject.toJSONString(tableInfo), BackMatchEntity.class);
//            if (tableInfo != null) {
//                sentenceEntity.setBackMatchEntity(tableInfo);
//                temp.add(diskLevel);
//            }
//            //放开十个请求进行查看单个
//            sentenceEntity.setBackMatchEntityAll(temp);
//            ls.add(sentenceEntity);
//            res.add(sentenceDiskLevel);
//        }
//    }

    /**
     * Queues a custom text-comparison task: stores the submitted text together
     * with the sentences it should be compared against as a pending
     * {@code PaperAnaTodo} row.
     *
     * @param compareStr sentences to compare against (persisted as a JSON array)
     * @param str        the text submitted for comparison
     * @param sysUser    submitting user; id and user name are recorded on the task
     */
    public void InsTempEs(String[] compareStr, String str, Employee sysUser) {
        long date = System.currentTimeMillis();
        // NOTE(review): if getId() is numeric, `date + sysUser.getId()` is arithmetic
        // addition, not string concatenation — possibly unintended, but kept as-is so
        // existing keys stay stable. Confirm the intended key format.
        String key = Code.digestData(date + sysUser.getId() + "paperana");
        PaperAnaTodo paperAnaTodo = new PaperAnaTodo();
        paperAnaTodo.setUid(sysUser.getId());
        paperAnaTodo.setUploadName(sysUser.getUserName());
        // hutool DateUtil.now() already returns a String (the redundant `+ ""` was dropped).
        paperAnaTodo.setTitle(DateUtil.now());
        paperAnaTodo.setContent(str);
        paperAnaTodo.setRekey(key);
        paperAnaTodo.setContentList(JSON.toJSONString(compareStr));
        // status 1 — presumably "pending"; verify against the task consumer.
        paperAnaTodo.setStatus(1);
        paperAnaTodo.setUptime(System.currentTimeMillis());
        paperAnaTodo.setSysCreateTime(LocalDateTime.now());
        paperAnaTodo.setSysUpdateTime(LocalDateTime.now());
        paperAnaTodoMapper.insert(paperAnaTodo);
    }

    // Create the custom ES text index.
    /**
     * Asynchronously creates the self-maintained text index with the mapping used
     * by the comparison pipeline. No-op when the index already exists.
     * <p>
     * Fixes over the original: the duplicate {@code database_type} property
     * mapping was removed, and the stdout print was replaced with the class logger.
     *
     * @param indexName name of the index to create
     * @throws RuntimeException wrapping any {@link IOException} from the ES client
     */
    @Async
    public void createTextIndex(String indexName) {
        try {
            boolean value = esClient.indices().exists(b -> b.index(indexName)).value();
            log.info("文字库存在吗indexName?+" + indexName + "++" + value);
            if (value) {
                log.info("存在不创建文字库");
                return;
            }
            log.info("不存在++++++++++++创建文字库");
            // Create the index: no replicas, 4 shards.
            CreateIndexResponse createIndexResponse =
                    esClient.indices().create(c -> c.index(indexName)
                            .settings(indexSettingsBuilder -> indexSettingsBuilder.numberOfReplicas("0").numberOfShards("4"))
                            .mappings(o -> o
                                    // Plain text fields; only "str" needs analyzer configuration.
                                    .properties("database_type", i -> i.text(p -> p))
                                    .properties("sentence", i -> i.text(p -> p))
                                    // "str" is the searched sentence body: ik_max_word analysis plus a
                                    // lowercase keyword sub-field for exact matching.
                                    .properties("str", i -> i.text(p -> p
                                            .analyzer("ik_max_word")
                                            .searchAnalyzer("ik_max_word")
                                            .fields("keyword", r -> r.keyword(m -> m.normalizer("lowercase")))))
                                    .properties("sub_project", i -> i.text(p -> p))
                                    .properties("file_id", i -> i.text(p -> p))
                                    .properties("table_id", i -> i.text(p -> p))
                            ));
            // Was System.out.println — use the class logger instead.
            log.info("createTextIndex acknowledged=" + createIndexResponse.acknowledged());
            boolean value2 = esClient.indices().exists(b -> b.index(indexName)).value();
            log.info("创建后检查+" + indexName + "++" + value2);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Bulk-indexes the extracted paragraphs of one uploaded document into the
     * self-maintained text index so later comparisons can match against them.
     * <p>
     * Simplified over the original: the intermediate content list was removed,
     * the chunked/unchunked branches were unified via {@code ListUtil.split}
     * (which also handles lists of 5000 or fewer), and an empty paragraph list
     * no longer issues a bulk request (ES rejects empty bulks).
     *
     * @param pdf      parsed paragraphs of the document to index
     * @param file_id  primary key of the upload record, stored as {@code file_id}
     * @param table_id user id, stored as {@code table_id} on the ES document
     * @return true when every bulk request succeeded, false if any IO error occurred
     */
    public boolean dealManualData(List<FileDetailsTxt> pdf, Integer file_id, Integer table_id) {
        String index = IndexConfig.SELF_TETX_INDEX;
        log.info("入库es名称+" + index);
        List<BulkOperation> bulkOperations = new ArrayList<>(pdf.size());
        for (FileDetailsTxt fileDetails : pdf) {
            EsServerEntityTemp esEntity = new EsServerEntityTemp();
            esEntity.setSentence("");
            esEntity.setDatabase_type("");
            esEntity.setStr(fileDetails.getTxtContent());
            esEntity.setSub_project("");
            esEntity.setFile_id(file_id + "");
            esEntity.setTable_id(table_id + "");
            bulkOperations.add(new BulkOperation.Builder().create(d -> d.document(esEntity)).build());
        }
        log.info("入库前+es+" + index + "数量是bulkOperations+" + bulkOperations.size());
        if (bulkOperations.isEmpty()) {
            // Nothing to index; an empty bulk request would be rejected by ES.
            return true;
        }
        boolean isTrue = true;
        // At most 5000 operations per bulk request.
        for (List<BulkOperation> operations : ListUtil.split(bulkOperations, 5000)) {
            try {
                client.bulk(req -> req.index(index).operations(operations));
            } catch (IOException e) {
                isTrue = false;
                e.printStackTrace();
            }
        }
        return isTrue;
    }

    /**
     * Variant of the manual-data indexer used by the "sta" flow: bulk-indexes the
     * parsed paragraphs into the self text index, storing {@code rid} as
     * {@code file_id} and {@code uid} as {@code table_id}.
     * <p>
     * Simplified over the original: the intermediate content list was removed,
     * the chunked/unchunked branches were unified via {@code ListUtil.split},
     * and an empty paragraph list no longer issues a bulk request (ES rejects
     * empty bulks).
     *
     * @param pdf parsed paragraphs of the document to index
     * @param rid primary key of the record table
     * @param uid user id
     * @return true when every bulk request succeeded, false if any IO error occurred
     */
    public boolean dealManualDataSta(List<FileDetailsTxt> pdf, Integer rid, Integer uid) {
        String index = IndexConfig.SELF_TETX_INDEX;
        log.info("入库es名称+" + index);
        List<BulkOperation> bulkOperations = new ArrayList<>(pdf.size());
        for (FileDetailsTxt fileDetails : pdf) {
            EsServerEntityTemp esEntity = new EsServerEntityTemp();
            esEntity.setSentence("");
            esEntity.setDatabase_type("");
            esEntity.setStr(fileDetails.getTxtContent());
            esEntity.setSub_project("");
            esEntity.setFile_id(rid + "");
            esEntity.setTable_id(uid + "");
            bulkOperations.add(new BulkOperation.Builder().create(d -> d.document(esEntity)).build());
        }
        if (bulkOperations.isEmpty()) {
            // Nothing to index; an empty bulk request would be rejected by ES.
            return true;
        }
        boolean isTrue = true;
        // At most 5000 operations per bulk request.
        for (List<BulkOperation> operations : ListUtil.split(bulkOperations, 5000)) {
            try {
                client.bulk(req -> req.index(index).operations(operations));
            } catch (IOException e) {
                isTrue = false;
                e.printStackTrace();
            }
        }
        return isTrue;
    }

//    // 获取两篇论文之间的相似度。这里有一个重要的参数，即如果两个句子之间，相同的个数大于40%时，才给予统计，否则是放弃的。
//    public static double getArticleSimilarity(List<List<String>> fromSentenceList, List<List<String>> toSentenceList) throws IOException {
//        int containsSum = 0;
//        int fromWordSum = 0;
//        for (List<String> fromWordList : fromSentenceList) {
//            int containsNum = getMaxSentenceWordNum(fromWordList, toSentenceList);
//            // 每一个句子，包含的最大的单词的个数，如果大于该句子的总单词数量的40%，计入重复的总数。
//            if ((containsNum + 0.0) / (fromWordList.size() + 0.0) > 0.4) {
//                containsSum += containsNum;
//            }
//            // 待检测的论文的单词的总个数，不管相同的单词的个数的数量有多少，全部计算。
//            fromWordSum += fromWordList.size();
//        }
//        return takeFourDigits((containsSum + 0.0) / (fromWordSum + 0.0)) * 100;
//    }

    /**
     * Text comparison, version 3 (workflow agreed on 2024-03-28).
     * <p>
     * Flow:
     * <ol>
     *   <li>Query ES per sentence and keep the top five hits; only hits whose
     *       similarity score is at least 0.6 are accumulated (not just the single
     *       best hit, since every accumulated hit is already above 60%).</li>
     *   <li>Rank the matched articles by accumulated matching-word count, fetch the
     *       full text of the top candidates by pm/ut, and compute full-text
     *       similarity between this paper and each candidate.</li>
     *   <li>Emit the aggregated result.</li>
     * </ol>
     *
     * @param artYear    publication year of the analysed article; may be empty
     * @param orgPdfPath path (relative to the upload root) of the PDF under analysis; may be empty
     * @param str        sentences to analyse
     * @param fid        file_id — primary key of the record table
     * @param uid        table_id — numeric suffix of the per-user table (e.g. ncbi_pdf_to_txt_101 -> 101)
     * @param splitAs    1 means a user-defined (self-library) comparison — should not reach this method
     * @param pmorut     pm or ut id of the author's own article to exclude; may be empty
     * @return aggregated comparison result ({@code esava} is 2 when the ES index was unavailable)
     */
    @SneakyThrows
    public OutEntity dealDataVersion3(String artYear, String orgPdfPath, List<String> str, Integer fid, Integer uid, Integer splitAs, String pmorut) {
        if (StringUtil.isNotNull(pmorut)) {
            // Normalise the excluded id: lowercase and strip the WOS prefix.
            pmorut = pmorut.toLowerCase().replace("wos:", "");
        }
        log.info("开始比对数据需要排除的 pmorut+" + pmorut);
        log.info("开始比对数据需要排除的 artYear+" + artYear);
        log.info("splitAs+" + splitAs);
        OutEntity outEntity = new OutEntity();
        // Flag whether a self-article is being excluded ("是" = yes, "否" = no).
        if (StringUtil.isNotNull(pmorut)) {
            outEntity.setSelfextra("是");
        } else {
            outEntity.setSelfextra("否");
        }
        if (StringUtil.isNotNull(artYear)) {
            outEntity.setAyear(Integer.parseInt(artYear));
        }
        log.info("outEntity.setAyear++" + outEntity.getAyear());
        // 2024-03-21: comparison is based on identical-word counts. ES retrieval uses
        // minimumShouldMatch of at least 60%; all possibly-similar articles are
        // reported instead of a single best-match similarity (which was inaccurate).
        // Sentences that actually take part in the comparison.
        List<String> compareOrgStr = new ArrayList<>();
        // First verify the online ES text index is reachable; skip everything if not.
        boolean isAVA = esSearch.checkOnlineEsIsAVA(txtEsClient);
        log.info("检测全网文字库结束 可用性是+" + isAVA);
        if (isAVA) {
            // Whether this is a user-defined (self-library) comparison; the self
            // flow has its own method, so reaching it here is a logic error.
            boolean isMaul = splitAs != null && splitAs == 1;
            if (!isMaul) {
                log.info("全网比对");
            } else {
                log.error("自建库比对--这里不应该出来 否则是错误的");
            }
            // Total word count of the analysed sentences.
            int strSum = 0;
            // Total count of matching words found via ES.
            int esStrSum = 0;
            // Only hits with similarity >= 0.6 are counted.
            final double riskDouble = 0.6;
            // Article id (pm or ut) -> accumulated matching-word count.
            Map<String, Integer> maxArticleMap = new HashMap<>();
            log.info("待分析句子总数+" + str.size());
            long sss = System.currentTimeMillis();
            for (String s : str) {
                if (!StringUtil.isNotNull(s)) {
                    continue;
                }
                // Normalise each word (strip punctuation, lemmatise, lowercase) for
                // matching; the original sentence text itself is kept untouched so
                // it can still be shown verbatim in the report.
                String[] rawWords = StrUtils.repOtherStr(s).split(" ");
                List<String> keyOrgSp = new ArrayList<>(rawWords.length);
                for (String word : rawWords) {
                    keyOrgSp.add(getLemma(word).toLowerCase());
                }
                strSum += keyOrgSp.size();
                // Sentences must be longer than three words (English-only rule here),
                // and reference-section sentences are excluded from comparison.
                if (keyOrgSp.size() <= 3 || checkReferenceExclude(s)) {
                    continue;
                }
                compareOrgStr.add(s);
                List<EsServerEntity> strcntest = new ArrayList<>();
                try {
                    strcntest = esSearch.esSearchTong(esClient, s, fid, uid, splitAs, "60%", artYear, pmorut);
                } catch (Exception e) {
                    // Best-effort: a failed lookup only skips this sentence.
                    log.error("esSearchTong failed for sentence", e);
                }
                // Keep at most the top five hits.
                if (strcntest.size() > 5) {
                    strcntest = strcntest.subList(0, 5);
                }
                for (EsServerEntity esEntityTemp : strcntest) {
                    if (isMaul) {
                        continue;
                    }
                    String pmutes = esEntityTemp.getPmid_ut();
                    if (!StringUtil.isNotNull(pmutes)) {
                        continue;
                    }
                    // Lemmatise the matched ES sentence the same way as the input.
                    StringBuilder lemmaBuf = new StringBuilder();
                    for (String word : esEntityTemp.getStr().split(" ")) {
                        lemmaBuf.append(getLemma(word).toLowerCase()).append(' ');
                    }
                    List<String> esResList = StrUtils.sentenceParseWordList(lemmaBuf.toString());
                    // Similarity score of the two word lists.
                    Double aDouble = StrUtils.sentenceParseWordNewList(keyOrgSp, esResList);
                    // Number of identical words.
                    Integer sameSize = StrUtils.sentenceParseWordSin(keyOrgSp, esResList);
                    if (aDouble >= riskDouble) {
                        esStrSum += sameSize;
                        // Accumulate the matching-word count per article id.
                        maxArticleMap.merge(pmutes, sameSize, Integer::sum);
                    }
                }
            }
            SmallTool.printMessLongTimeTake("句子结束总耗时++", sss);
            SmallTool.printMess("真实分析的句子总数+" + compareOrgStr.size());
            // Keep only the top 5 most-matched articles.
            Map<String, Integer> stringIntegerMap = PublicUtils.mapSortValueLen(maxArticleMap, 5);
            List<String> strings2 = new ArrayList<>(stringIntegerMap.keySet());
            log.info("开始匹配最佳文章strings+" + strings2.size());
            Calendar calendar = Calendar.getInstance();
            String date = "" + calendar.get(Calendar.YEAR) + (calendar.get(Calendar.MONTH) + 1) + calendar.get(Calendar.DATE);
            // Temporary folder (per day) for downloaded candidate PDFs; cleaned up
            // after the comparison so it does not waste disk space.
            String downTempPdfPath = "/home/analysis/comparepdftemp/" + date + "/";
            if (!FileUtil.exist(downTempPdfPath)) {
                new File(downTempPdfPath).mkdirs();
            }
            // Names of the downloaded candidate PDFs.
            List<String> bFileDownPdfPath = new ArrayList<>();
            // Downloaded file name -> matched article metadata.
            Map<String, OutEntity.BestArticle> filePdfNameArticleInfo = new LinkedHashMap<>();
            // Path of the original PDF under comparison.
            String apdfPath = uploadPath + orgPdfPath;
            log.info("原始文件路径+" + apdfPath);
            String afileName = new File(apdfPath).getName();
            if (!FileUtil.isDirectory(apdfPath)) {
                FileUtil.copy(apdfPath, downTempPdfPath, true);
            } else {
                log.info("因为是文件夹 应该是纯文字过的方法 后续不适应 后期改吧 空的 不允许复制");
            }
            for (String s : strings2) {
                OutEntity.BestArticle bestArticle = new OutEntity.BestArticle();
                Map<String, Object> param = new HashMap<>();
                param.put("pmut", s);
                // Length 14-15 identifies a WOS ut id; anything else is a pubmed pm id.
                if (s.length() == 15 || s.length() == 14) {
                    param.put("pmorut", 2);
                } else {
                    param.put("pmorut", 1);
                }
                String post = HttpUtil.post(escenterUrl + "web/pub/getpminfoByid", param);
                Map parse = JSONObject.parseObject(post, Map.class);
                BackMatchEntity tableInfo = null;
                if (parse.get("code").toString().equals("0")) {
                    tableInfo = JSONObject.parseObject(parse.get("data").toString(), BackMatchEntity.class);
                }
                if (tableInfo != null) {
                    BeanUtils.copyProperties(tableInfo, bestArticle);
                }
                // Download the candidate full text for the best-match computation.
                WosBasic wosBasic = new WosBasic();
                if (param.get("pmorut").toString().equals("1")) {
                    wosBasic.setCPm(s);
                } else {
                    wosBasic.setCUt(s);
                }
                String downPdfPath = getBestMatchArticleStr(downTempPdfPath, wosBasic);
                if (StringUtils.isNotEmpty(downPdfPath)) {
                    String downName = new File(downPdfPath).getName();
                    bFileDownPdfPath.add(downName);
                    filePdfNameArticleInfo.put(downName, bestArticle);
                }
            }
            // Full-text similarity computation against every downloaded candidate.
            Caculate(downTempPdfPath, afileName, bFileDownPdfPath, riskDouble, outEntity, filePdfNameArticleInfo);
            log.info("获取最匹配文章信息结束");
            outEntity.setAllSameSize(esStrSum);
            outEntity.setAllWordSize(strSum);
            outEntity.setEsava(1);
            log.info("转换数据结束");
        } else {
            // ES unavailable: fall back to zeroed defaults and mark esava = 2.
            log.info("检索结果不可用执行默认值赋值");
            outEntity.setAllSameSize(0);
            outEntity.setAllWordSize(0);
            outEntity.setSameDouble(0.0);
            outEntity.setEsava(2);
            log.info("转换数据结束文字检索库不可用");
        }
        return outEntity;
    }

    /**
     * Text comparison version 3 — self-library variant (2024-04-28).
     * <p>
     * Copied from {@code dealDataVersion3}: when the user compares against a
     * user-defined (self-built) text library this method is used instead, so the
     * two flows are kept separate rather than switched by parameters (which would
     * be very hard to maintain later).
     *
     * @param comIds     ids of the self-library texts to compare against; empty means the user's whole library
     * @param artYear    publication year of the analysed article; may be empty
     * @param orgPdfPath path (relative to the upload root) of the PDF under analysis; may be empty
     * @param str        sentences to analyse
     * @param uid        table_id — numeric suffix of ncbi_pdf_to_txt_101 (101), i.e. the TextSelfDb uid
     * @param splitAs    1 means a user-defined (self-library) comparison
     * @param pmorut     pm or ut id to exclude
     * @return aggregated comparison result ({@code esava} is 2 when the local ES index was unavailable)
     */
    @SneakyThrows
    public OutEntity dealDataVersion3Self(String comIds, String artYear, String orgPdfPath, List<String> str, Integer uid, Integer splitAs, String pmorut) {
        log.info("开始自定义库自定义文字比对数据需要排除的 pmorut+" + pmorut);
        log.info("开始自定义库自定义文字比对比对数据需要排除的 artYear+" + artYear);
        log.info("开始自定义库自定义文字比对比对数据需要对比自定义库的id  comIds+" + comIds);
        log.info("开始自定义库自定义文字比对比对数据需要对比自定义库的id  uid+" + uid);
        // NOTE(review): uid is unconditionally cleared here (non-null -> null), so every
        // getSearchListTongSelf call below receives null — confirm this is intended.
        if (uid != null) {
            uid = null;
        }

        // Ruijin deployments take a dedicated branch that also supports Chinese sentences.
        OutEntity outEntity = new OutEntity();
        if (StrUtils.isRuiJin()) {
            log.info("瑞金服务器进行测试");
            Integer artYearInt = null;
            if (StringUtil.isNotNull(artYear)) {
                artYearInt = Integer.parseInt(artYear);
                outEntity.setAyear(artYearInt);
            }
            log.info("outEntity.setAyear++" + outEntity.getAyear());
            // 2024-03-21: comparison is based on identical-word counts. ES retrieval
            // uses minimumShouldMatch of at least 60%; all possibly-similar articles
            // are reported instead of a single best-match similarity (inaccurate).
            // Sentences that actually take part in the comparison.
            List<String> compareOrgStr = new ArrayList<>();
            // Whether this is a user-defined (self-library) comparison.
            boolean isMaul = false;
            if (splitAs != null && splitAs == 1) {
                isMaul = true;
            }
            if (!isMaul) {
                log.error(" 本地库对比 这句话应该不打印 全网比对");
            } else {
                log.info("我的自定义库进行对比");
            }

            boolean b = esSearch.checkLocalEsIsAVA(esClient);
            // First check that the local ES index is available; skip everything if not.
            boolean isAVA = b;
            if (isAVA) {
                // Total word count of the analysed sentences.
                Integer strSum = 0;
                // Total count of matching words found via ES.
                Integer esStrSum = 0;
                // Only hits with similarity >= 0.6 are counted.
                Double riskDouble = 0.6;
                // Article id (TextSelfDb row id as string) -> accumulated matching-word count.
                Map<String, Integer> maxArticleMap = new HashMap<>();
                log.info("待分析句子总数+" + str.size());
                long sss = System.currentTimeMillis();
                for (String s : str) {
//                    System.out.println("比对的句子++" + s);
                    if (StringUtil.isNotNull(s)) {
                        // The sentence may be English or Chinese.
                        boolean isContainChina = CheckFirstUnit.isContainChinese(s);
                        String[] sTemp = StrUtils.repOtherStr(s).split(" ");
                        List<String> strList = new ArrayList<>();
                        // The original sentence must not be altered, otherwise it could not
                        // be matched back to the sentence shown in the report.
                        for (String s1 : sTemp) {
                            strList.add(getLemma(s1).toLowerCase());
                        }
                        if (strList.size() == 1) {
                            // A single "word" is likely an unsegmented Chinese sentence:
                            // count its units via cnEnToListWord instead.
                            strSum += StrUtils.cnEnToListWord(strList).size();
                        } else {
                            strSum += strList.size();
                        }
                        // Sentences must be longer than three words; Chinese ones are always kept.
                        if (strList.size() > 3 || isContainChina) {
                            if (!checkReferenceExclude(s) || isContainChina) {
                                compareOrgStr.add(s);
                                List<String> keyOrgSp = strList;
                                List<EsServerEntity> strcntest = new ArrayList<>();
                                try {
                                    strcntest = esSearch.getSearchListTongSelf(comIds, esClient, s, uid, "60%");
                                } catch (Exception e) {
                                    e.printStackTrace();
                                }
//                        log.info("检索出来的数量+strcntest"+strcntest.size()+"检索句子+"+s);
                                if (strcntest.size() > 0) {
                                    strcntest = strcntest.subList(0, 1);
                                }

                                // Keep only the first ES hit; ignore the rest.
                                for (EsServerEntity esEntityTemp : strcntest) {
                                    // file_id -> id of the self-library article row
                                    // table_id -> uid of the self-library article row
                                    Integer file_id = esEntityTemp.getFile_id();
                                    Integer table_id = esEntityTemp.getTable_id();
                                    if (StringUtil.isNotNull(file_id) && StringUtil.isNotNull(table_id)) {
                                        TextSelfDb textSelfDb = textSelfDbMapper.selectById(file_id);
                                        if (StringUtil.isNotNull(textSelfDb)) {
                                            String backPmid = textSelfDb.getId().toString();
                                            // Lemmatise the matched ES sentence the same way as the input.
                                            String[] sTemp2 = (esEntityTemp.getStr()).split(" ");
                                            StringBuffer stringBuffer = new StringBuffer();
                                            for (String s1 : sTemp2) {
                                                stringBuffer.append(getLemma(s1).toLowerCase() + " ");
                                            }
                                            String esSearchResLemma = stringBuffer.toString();
//                                            System.out.println("essearch++" + esSearchResLemma);
                                            List<String> esResList = StrUtils.sentenceParseWordList(esSearchResLemma);
                                            // Similarity score of the two word lists.
                                            Double aDouble = StrUtils.sentenceParseWordNewList(keyOrgSp, esResList);
                                            // Number of identical words.
                                            Integer sameSize = StrUtils.sentenceParseWordSin(keyOrgSp, esResList);
                                            if (aDouble >= riskDouble) {
                                                esStrSum += sameSize;
                                                maxArticleMap.compute(backPmid, (key, value) -> value != null ? (value + sameSize) : sameSize);
                                            }
                                        } else {
                                            log.error("自定义文字库不存在id+" + file_id);
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
                SmallTool.printMessLongTimeTake("句子结束总耗时++", sss);
                SmallTool.printMess("真实分析的句子总数+" + compareOrgStr.size());
                // Release the best-matching article info.
                // For now only the top 5 are used.
                Map<String, Integer> stringIntegerMap = PublicUtils.mapSortValueLen(maxArticleMap, 5);
                List<String> strings2 = new ArrayList<>(stringIntegerMap.keySet());
                log.info("开始匹配最佳文章strings+" + strings2.size());
                Calendar calendar = Calendar.getInstance();
                String date = "" + calendar.get(Calendar.YEAR) + (calendar.get(Calendar.MONTH) + 1) + calendar.get(Calendar.DATE);
                // Temporary folder (per day) for the PDFs used in comparison; deleted
                // afterwards so it does not waste disk space.
                String downTempPdfPath = "/home/analysis/comparepdftemp/" + date + "/";
                if (!FileUtil.exist(downTempPdfPath)) {
                    File path = new File(downTempPdfPath);
                    path.mkdirs();
                }
                // 2024-03-28: copy the candidate article files into the folder above.
//        Map<String, String> ButpmPdfPath = new LinkedHashMap<>();
                List<String> bFileDownPdfPath = new ArrayList<>();
                // Downloaded file name -> matched article metadata.
                Map<String, OutEntity.BestArticle> filePdfNameArticleInfo = new LinkedHashMap<>();
                // Path of the original PDF under comparison.
                String apdfPath = uploadPath + orgPdfPath;
                log.info("原始文件路径+" + apdfPath);
                String afileName = new File(apdfPath).getName();
                FileUtil.copy(apdfPath, downTempPdfPath, true);
                if (strings2.size() > 0) {
                    for (String string : strings2) {
                        OutEntity.BestArticle bestArticle = new OutEntity.BestArticle();
                        String s = string;
                        TextSelfDb textSelfDb = textSelfDbMapper.selectById(Integer.parseInt(s));
                        if (textSelfDb != null) {
                            BackMatchEntity tableInfoNew = new BackMatchEntity();
                            tableInfoNew.setTitle(textSelfDb.getFileName());
                            BeanUtils.copyProperties(tableInfoNew, bestArticle);
                            String downPdfPath = getBestMatchArticleStrSelf(downTempPdfPath, textSelfDb, uploadPath);
                            if (StringUtils.isNotEmpty(downPdfPath)) {
                                bFileDownPdfPath.add(new File(downPdfPath).getName());
                                filePdfNameArticleInfo.put(new File(downPdfPath).getName(), bestArticle);
                            }
                        }
                    }
                }
                // Full-text comparison against every candidate article.
                Caculate(downTempPdfPath, afileName, bFileDownPdfPath, riskDouble, outEntity, filePdfNameArticleInfo);
                log.info("获取最匹配文章信息结束");
                outEntity.setAllSameSize(esStrSum);
                outEntity.setAllWordSize(strSum);
                outEntity.setEsava(1);
                log.info("转换数据结束");
            } else {
                outEntity.setEsava(2);
                outEntity.setAllSameSize(0);
                outEntity.setAllWordSize(0);
                outEntity.setSameDouble(0.0);
                log.info("本地文字库不存在 或者数量是0");
            }


        } else {
            // Non-Ruijin deployment: same flow, but English-only sentence handling.
            Integer artYearInt = null;
            if (StringUtil.isNotNull(artYear)) {
                artYearInt = Integer.parseInt(artYear);
                outEntity.setAyear(artYearInt);
            }
            log.info("outEntity.setAyear++" + outEntity.getAyear());
            // 2024-03-21: comparison is based on identical-word counts. ES retrieval
            // uses minimumShouldMatch of at least 60%; all possibly-similar articles
            // are reported instead of a single best-match similarity (inaccurate).
            // Sentences that actually take part in the comparison.
            List<String> compareOrgStr = new ArrayList<>();
            // Whether this is a user-defined (self-library) comparison.
            boolean isMaul = false;
            if (splitAs != null && splitAs == 1) {
                isMaul = true;
            }
            if (!isMaul) {
                log.error(" 本地库对比 这句话应该不打印 全网比对");
            } else {
                log.info("我的自定义库进行对比");
            }

            boolean b = esSearch.checkLocalEsIsAVA(esClient);
            // First check that the local ES index is available; skip everything if not.
//        String strTest = "A bacterial strain, QWE-5, which utilized naphthalene as its sole carbon and energy source";
//        List<EsServerEntity> strcntestTest = esSearch.getSearchListTong(esClient, strTest, INDEX, fid, uid, splitAs, "60%");
            boolean isAVA = b;


            if (isAVA) {
                // Total word count of the analysed sentences.
                Integer strSum = 0;
                // Total count of matching words found via ES.
                Integer esStrSum = 0;
                // Only hits with similarity >= 0.6 are counted.
                Double riskDouble = 0.6;
                // Article id (TextSelfDb row id as string) -> accumulated matching-word count.
                Map<String, Integer> maxArticleMap = new HashMap<>();
                // Store all retrieval results.
//        Map<String, EsServerEntity> tempMapAllESInfo = new LinkedHashMap<>();
                // Cache of pm data for faster report generation.
//        Map<String, String> pminfoSetRes = new HashMap<>();
                log.info("待分析句子总数+" + str.size());
                long sss = System.currentTimeMillis();
                for (String s : str) {
//            SentenceDiskLevel sentenceDiskLevel = new SentenceDiskLevel();
                    if (StringUtil.isNotNull(s)) {
                        // Normalise each word (lemma + lowercase) for matching.
                        String[] sTemp = StrUtils.repOtherStr(s).split(" ");
                        List<String> strList = new ArrayList<>();
                        // The original sentence must not be altered, otherwise it could not
                        // be matched back to the sentence shown in the report.
                        for (String s1 : sTemp) {
                            strList.add(getLemma(s1).toLowerCase());
                        }
                        strSum += strList.size();
                        // Sentences must be longer than three words.
                        if (strList.size() > 3) {
                            if (!checkReferenceExclude(s)) {
                                compareOrgStr.add(s);
                                List<String> keyOrgSp = strList;
                                List<EsServerEntity> strcntest = new ArrayList<>();
                                try {
                                    strcntest = esSearch.getSearchListTongSelf(comIds, esClient, s, uid, "60%");
                                } catch (Exception e) {
                                    e.printStackTrace();
                                }
//                        log.info("检索出来的数量+strcntest"+strcntest.size()+"检索句子+"+s);
                                if (strcntest.size() > 0) {
                                    strcntest = strcntest.subList(0, 1);
                                }
                                // Keep only the first ES hit; ignore the rest.
                                for (EsServerEntity esEntityTemp : strcntest) {
                                    // file_id -> id of the self-library article row
                                    // table_id -> uid of the self-library article row
                                    Integer file_id = esEntityTemp.getFile_id();
                                    Integer table_id = esEntityTemp.getTable_id();
                                    if (StringUtil.isNotNull(file_id) && StringUtil.isNotNull(table_id)) {
                                        TextSelfDb textSelfDb = textSelfDbMapper.selectById(file_id);
                                        // NOTE(review): unlike the Ruijin branch, a missing
                                        // TextSelfDb row is silently ignored here.
                                        if (StringUtil.isNotNull(textSelfDb)) {
                                            String backPmid = textSelfDb.getId().toString();
                                            // Lemmatise the matched ES sentence the same way as the input.
                                            String[] sTemp2 = (esEntityTemp.getStr()).split(" ");
                                            StringBuffer stringBuffer = new StringBuffer();
                                            for (String s1 : sTemp2) {
                                                stringBuffer.append(getLemma(s1).toLowerCase() + " ");
                                            }
                                            String esSearchResLemma = stringBuffer.toString();
//                                            System.out.println("essearch++" + esSearchResLemma);
                                            List<String> esResList = StrUtils.sentenceParseWordList(esSearchResLemma);
                                            // Similarity score of the two word lists.


                                            Double aDouble = StrUtils.sentenceParseWordNewList(keyOrgSp, esResList);
                                            // Number of identical words.
                                            Integer sameSize = StrUtils.sentenceParseWordSin(keyOrgSp, esResList);
                                            if (aDouble >= riskDouble) {
                                                esStrSum += sameSize;
                                                maxArticleMap.compute(backPmid, (key, value) -> value != null ? (value + sameSize) : sameSize);
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
                SmallTool.printMessLongTimeTake("句子结束总耗时++", sss);
                SmallTool.printMess("真实分析的句子总数+" + compareOrgStr.size());
                // Release the best-matching article info.
                // For now only the top 5 are used.
                Map<String, Integer> stringIntegerMap = PublicUtils.mapSortValueLen(maxArticleMap, 5);
                List<String> strings2 = new ArrayList<>(stringIntegerMap.keySet());
                log.info("开始匹配最佳文章strings+" + strings2.size());
                Calendar calendar = Calendar.getInstance();
                String date = "" + calendar.get(Calendar.YEAR) + (calendar.get(Calendar.MONTH) + 1) + calendar.get(Calendar.DATE);
                // Temporary folder (per day) for the PDFs used in comparison; deleted
                // afterwards so it does not waste disk space.
                String downTempPdfPath = "/home/analysis/comparepdftemp/" + date + "/";
                if (!FileUtil.exist(downTempPdfPath)) {
                    File path = new File(downTempPdfPath);
                    path.mkdirs();
                }
                // 2024-03-28: copy the candidate article files into the folder above.
//        Map<String, String> ButpmPdfPath = new LinkedHashMap<>();
                List<String> bFileDownPdfPath = new ArrayList<>();
                // Downloaded file name -> matched article metadata.
                Map<String, OutEntity.BestArticle> filePdfNameArticleInfo = new LinkedHashMap<>();
                // Path of the original PDF under comparison.
                String apdfPath = uploadPath + orgPdfPath;
                log.info("原始文件路径+" + apdfPath);
                String afileName = new File(apdfPath).getName();
                FileUtil.copy(apdfPath, downTempPdfPath, true);
                if (strings2.size() > 0) {
                    for (String string : strings2) {
                        OutEntity.BestArticle bestArticle = new OutEntity.BestArticle();
                        String s = string;
                        TextSelfDb textSelfDb = textSelfDbMapper.selectById(Integer.parseInt(s));
                        if (textSelfDb != null) {
                            BackMatchEntity tableInfoNew = new BackMatchEntity();
                            tableInfoNew.setTitle(textSelfDb.getFileName());
                            BeanUtils.copyProperties(tableInfoNew, bestArticle);
                            String downPdfPath = getBestMatchArticleStrSelf(downTempPdfPath, textSelfDb, uploadPath);
                            if (StringUtils.isNotEmpty(downPdfPath)) {
                                bFileDownPdfPath.add(new File(downPdfPath).getName());
                                filePdfNameArticleInfo.put(new File(downPdfPath).getName(), bestArticle);
                            }
                        }
                    }
                }
                // Full-text comparison against every candidate article.
                Caculate(downTempPdfPath, afileName, bFileDownPdfPath, riskDouble, outEntity, filePdfNameArticleInfo);
                log.info("获取最匹配文章信息结束");
                outEntity.setAllSameSize(esStrSum);
                outEntity.setAllWordSize(strSum);
                outEntity.setEsava(1);
                log.info("转换数据结束");
            } else {
                outEntity.setEsava(2);
                outEntity.setAllSameSize(0);
                outEntity.setAllWordSize(0);
                outEntity.setSameDouble(0.0);
                log.info("本地文字库不存在 或者数量是0");
            }

        }


        return outEntity;
    }

    /**
     * 判断字符
     * <p>
     * //     * @param sentenceDiskLevel 赋值对象
     * //     * @param org               原始文章段落
     * //     * @param score             得分 相似度得分
     * //     * @param esid              es 检索的id
     * //     * @param maxSameTemp       相同的数量
     */
//    public void checkRiskSetVersion2(SentenceDiskLevel sentenceDiskLevel, String org, double score, List<String> keyOrgSp, List<SentenceDiskLevel> res, String esid, Integer maxSameTemp, Map<String, EsServerEntity> tempMap, boolean isMaul, Map<String, String> pminfoSetRes) {
//
//
//        //批量进行赋值
//        boolean isDeal = false;
//        if (score >= 80 & !isDeal) {
//            isDeal = true;
//            sentenceDiskLevel.setLevelName(LevelName.HIGHLEVEL);
//
//        }
//        if (score >= 70 & !isDeal) {
//            isDeal = true;
//            sentenceDiskLevel.setLevelName(LevelName.MIDLEVEL);
//
//        }
//        if (score >= 60 & !isDeal) {
//            isDeal = true;
//            sentenceDiskLevel.setLevelName(LevelName.LOWLEVEL);
//        }
//        if (StringUtil.isNotNull(sentenceDiskLevel.getLevelName())) {
//            //进行组合数据
//            List<SentenceEntity> ls = sentenceDiskLevel.getLs();
//            SentenceEntity sentenceEntity = new SentenceEntity();
//            sentenceEntity.setStr(org);
//            sentenceEntity.setStrInt(keyOrgSp.size());
//            sentenceEntity.setMaxMatchInt(maxSameTemp);
////            Integer size = keyOrgSp.size() + machStrOrgSize;
////            Integer size = machStrOrgSize;
////            double tempCa = maxSameTemp.doubleValue() / size.doubleValue();
//            sentenceEntity.setMaxDoubleMatch(score + "%");
//            EsServerEntity esServerEntity = tempMap.get(esid);
//            sentenceEntity.setMatchStr(esServerEntity.getStr());
//            sentenceEntity.setStrMatchRed(esServerEntity.getStrMatchRed());
//            sentenceEntity.setMatchStr(esServerEntity.getStrMatch());
//            sentenceEntity.setMatchStrInt(maxSameTemp);
//            List<BackMatchEntity> temp = new ArrayList<>();
//            tempMap.forEach((k, v1) -> {
//                //先把每句话放弃 太慢了 后续优化
//                Map<String, Object> param = new HashMap<>();
//                param.put("tableId", v1.getTable_id());
//                param.put("fileId", v1.getFile_id());
//                param.put("matchStr", v1.getStr());
//                param.put("isManual", String.valueOf(isMaul));
//                param.put("pmorut", Integer.parseInt(v1.getDatabase_type()));
//                String PMINFOKEY = v1.getTable_id() + "" + v1.getFile_id();
//                String post = "";
//                if (pminfoSetRes.containsKey(PMINFOKEY)) {
//                    post = pminfoSetRes.get(PMINFOKEY);
//                } else {
//                    String URL = escenterUrl + "web/pub/getpminfo";
//                    post = HttpUtil.post(URL, param);
//                    if (StringUtil.isNotNull(post)) {
//                        String newres = post;
//                        pminfoSetRes.put(PMINFOKEY, newres);
//                    }
//                }
//                Map parse = JSONObject.parseObject(post, Map.class);
//                BackMatchEntity tableInfo = null;
//                if (parse.get("code").toString().equals("0")) {
//                    tableInfo = JSONObject.parseObject(parse.get("data").toString(), BackMatchEntity.class);
//                }
//                if (tableInfo != null) {
//                    tableInfo.setMatchStr(v1.getStr());
//                    temp.add(tableInfo);
//                }
//            });
//
//            Map<String, Object> param = new HashMap<>();
//            param.put("tableId", esServerEntity.getTable_id());
//            param.put("fileId", esServerEntity.getFile_id());
//            param.put("matchStr", esServerEntity.getStr());
//            param.put("isManual", String.valueOf(isMaul));
//            param.put("pmorut", Integer.parseInt(esServerEntity.getDatabase_type()));
//            String PMINFOKEY = esServerEntity.getTable_id() + "" + esServerEntity.getFile_id();
//            String post = "";
//            if (pminfoSetRes.containsKey(PMINFOKEY)) {
//                post = pminfoSetRes.get(PMINFOKEY);
//            } else {
//                String URL = escenterUrl + "web/pub/getpminfo";
//                post = HttpUtil.post(URL, param);
//                if (StringUtil.isNotNull(post)) {
//                    String newres = post;
//                    pminfoSetRes.put(PMINFOKEY, newres);
//                }
//            }
//
//            Map parse = JSONObject.parseObject(post, Map.class);
//            BackMatchEntity tableInfo = null;
//            if (parse.get("code").toString().equals("0")) {
//                tableInfo = JSONObject.parseObject(parse.get("data").toString(), BackMatchEntity.class);
//            }
//            BackMatchEntity diskLevel = JSONObject.parseObject(JSONObject.toJSONString(tableInfo), BackMatchEntity.class);
//            if (tableInfo != null) {
//                sentenceEntity.setBackMatchEntity(tableInfo);
//                temp.add(diskLevel);
//            }
//            //放开十个请求进行查看单个
//            sentenceEntity.setBackMatchEntityAll(temp);
//            ls.add(sentenceEntity);
//            res.add(sentenceDiskLevel);
//        }
//    }
    /**
     * Runs the article-vs-corpus similarity calculation and populates {@code outEntity}
     * with the overall similarity, the best-matching articles and the per-risk-level
     * sentence details.
     *
     * @param rootPath               directory containing the text files to compare
     * @param afileName              file name of the article under test ("A" article)
     * @param paramBFileNameList     file names of the candidate articles ("B" articles)
     * @param threshold              similarity threshold forwarded to {@code comprehensive}
     * @param outEntity              result holder mutated by this method (side effect)
     * @param filePdfNameArticleInfo lookup from pdf file name to article metadata
     * @throws IOException if the underlying comparison fails to read the files
     */
    public void Caculate(String rootPath, String afileName, List<String> paramBFileNameList, Double threshold, OutEntity outEntity, Map<String, OutEntity.BestArticle> filePdfNameArticleInfo) throws IOException {
        SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        log.info(format.format(new Date()) + " 程序开始了~~~");
        log.info("所有论文pmut 全部匹配的文章有效集合是+" + filePdfNameArticleInfo.size());
        log.info("a文章+" + afileName);
        log.info("b文章集合数量+" + paramBFileNameList.size());
        ArticleRepeatSummary.ComprehensiveInfo comprehensive = comprehensive(rootPath, afileName, paramBFileNameList, threshold);
        double copyProportion = comprehensive.getCopyProportion();

        // Collect the best-matching articles (top-5 by copy proportion) for the word report.
        List<OutEntity.BestArticle> similarArtLs = new ArrayList<>();
        for (ArticleRepeatSummary.CopyInfo copyInfo : comprehensive.getCopyTop5List()) {
            if (copyInfo.getCopyProportion() > 0.0) {
                OutEntity.BestArticle bestArticle = filePdfNameArticleInfo.get(copyInfo.getFileName());
                if (bestArticle != null) {
                    bestArticle.setSimilarSize(copyInfo.getCopyProportion() + "%");
                    // Fix: only add resolved articles. The previous code added null
                    // entries to the list, which caused NPEs in downstream consumers.
                    similarArtLs.add(bestArticle);
                } else {
                    log.info("匹配没有匹配到++" + copyInfo.getFileName());
                }
            }
        }
        outEntity.setBestArticles(similarArtLs);

        // Group the risky sentences by risk level and attach source-article metadata.
        List<SentenceDiskLevel> sentenceDiskLevel = outEntity.getSentenceDiskLevel();
        for (ArticleRepeatSummary.RiskLevelInfo riskLevel : comprehensive.getRiskLevelList()) {
            SentenceDiskLevel sentenceDiskLevel1 = new SentenceDiskLevel();
            if (riskLevel.getRiskLevel().equals(LevelName.HIGHLEVEL)) {
                sentenceDiskLevel1.setLevelName(LevelName.HIGHLEVEL);
            } else if (riskLevel.getRiskLevel().equals(LevelName.MIDLEVEL)) {
                sentenceDiskLevel1.setLevelName(LevelName.MIDLEVEL);
            } else if (riskLevel.getRiskLevel().equals(LevelName.LOWLEVEL)) {
                sentenceDiskLevel1.setLevelName(LevelName.LOWLEVEL);
            }
            List<SentenceEntity> ls = sentenceDiskLevel1.getLs();
            for (ArticleRepeatSummary.RiskSentence s : riskLevel.getSentenceList()) {
                SentenceEntity ins = new SentenceEntity();
                ins.setStr(s.getASentence());
                ins.setMatchStr(s.getBSentence());
                ins.setStrMatchRed(s.getBSentence());
                ins.setStrMatch(s.getBSentence());
                ins.setMaxDoubleMatch(s.getCopyProportion() + "%");
                ins.setMaxDoubleMatchD(s.getCopyProportion());
                OutEntity.BestArticle bestArticle = filePdfNameArticleInfo.get(s.getFileName());
                // Fix: guard the lookup result. The previous code dereferenced it
                // unconditionally and threw NPE for any unmatched file name.
                if (bestArticle != null) {
                    BackMatchEntity match = new BackMatchEntity();
                    match.setType(bestArticle.getType());
                    match.setTitle(bestArticle.getTitle());
                    match.setYear(bestArticle.getYear());
                    match.setAu(bestArticle.getAu());
                    match.setJour(bestArticle.getJour());
                    match.setKey(bestArticle.getKey());
                    match.setDoi(bestArticle.getDoi());
                    ins.setBackMatchEntity(match);
                } else {
                    log.info("匹配没有匹配到++" + s.getFileName());
                }
                ls.add(ins);
            }
            sentenceDiskLevel.add(sentenceDiskLevel1);
        }
        outEntity.setSameDouble(copyProportion);
    }

    /**
     * Downloads the PDF full text of the best-matching article so its text can be
     * compared against the submitted paper. No full-text parsing happens here.
     *
     * @param rootPath directory the PDF file is saved into
     * @param wosBasic article record supplying the PM id / WOS UT id; mutated with the
     *                 resolved pdf url, file type and relative file path on success
     * @return absolute path of the PDF on disk, or "" when nothing could be fetched
     */
    @SneakyThrows
    public String getBestMatchArticleStr(String rootPath, WosBasic wosBasic) {
        String downloadedPath = "";

        // Derive the local file name from the PM id, falling back to the WOS UT id.
        String baseName = "";
        if (!StringUtil.isEmpty(wosBasic.getCPm())) {
            baseName = wosBasic.getCPm();
        } else if (wosBasic.getCUt() != null) {
            baseName = wosBasic.getCUt().toLowerCase().replace("wos:", "");
        }
        baseName = RegexUtil.filterNAC(baseName);
        BasicCookieStore downloadCookies = new BasicCookieStore();
        if (baseName.length() > 100) {
            baseName = baseName.substring(0, 100);
        }
        String pdfPath = baseName + ".PDF";

        if (FileUtil.exist(rootPath + pdfPath)) {
            // Already downloaded earlier — reuse the cached copy.
            downloadedPath = rootPath + pdfPath;
        } else {
            // Ask the pubtsg gateway for the remote pdf location (query is XXTEA-encrypted).
            String queryUrl = "";
            if (!StringUtil.isEmpty(wosBasic.getCPm())) {
                String encryptedQuery = XXTEA.encryptToBase64StringNoSafe(wosBasic.getCPm(), "R0HWlCI3ndd1s8S3");
                queryUrl = "https://www.pubtsg.com/api/getpdfurl.do?unitid=1&type=0&query=" + encryptedQuery;
            } else if (wosBasic.getCUt() != null) {
                String encryptedQuery = XXTEA.encryptToBase64StringNoSafe(wosBasic.getCUt(), "R0HWlCI3ndd1s8S3");
                queryUrl = "https://www.pubtsg.com/api/getpdfurl.do?unitid=1&type=1&query=" + encryptedQuery;
            }
            BasicCookieStore queryCookies = new BasicCookieStore();
            String html = common.proxy.HttpUtil.postHtmlQuanWen(queryCookies, queryUrl, "", "");
            Map<String, Object> resolved = null;
            if (html != null) {
                resolved = JSON.parseObject(html, Map.class);
            } else {
                log.info("下载全文是null 注意!!");
            }
            if (resolved != null && resolved.get("pdfurl") != null) {
                String pdfUrl = (String) resolved.get("pdfurl");
                // Record the resolved location on the entity, then fetch the file.
                wosBasic.setFileType(1);
                wosBasic.setPdfUrl(pdfUrl);
                String attachPath = PdfUtil.getpdfurl(wosBasic.getPdfUrl(), 1, 1);
                common.proxy.HttpUtil.DownPdfWithProxy(downloadCookies, "https://attach.pubtsg.com/" + attachPath, "", rootPath + pdfPath);
                wosBasic.setFilePath(pdfPath);
                downloadedPath = rootPath + pdfPath;
            }
        }
        return downloadedPath;
    }

    /**
     * Copies a self-database ("自检文字库") source file into the working directory so
     * it can take part in the text comparison. No download happens for self files.
     *
     * @param rootPath   working directory the file is copied into
     * @param selfDb     self text-database record holding the stored path and file name
     * @param uploadPath base directory where the uploaded files live
     * @return absolute path of the copied file inside {@code rootPath}
     */
    @SneakyThrows
    public String getBestMatchArticleStrSelf(String rootPath, TextSelfDb selfDb, String uploadPath) {
        log.info("rootPATH =" + rootPath);
        log.info("uploadPath =" + uploadPath);
        File origin = new File(uploadPath + selfDb.getPath());
        File destination = new File(rootPath + selfDb.getFileName());
        // Overwrite flag is false: an existing copy is left untouched.
        FileUtil.copy(origin, destination, false);
        return rootPath + selfDb.getFileName();
    }


}
