package com.css.fxfzdzzh.modules.eqLandslide.count.service.impl;

import cn.hutool.core.io.FileUtil;
import cn.hutool.core.util.IdUtil;
import cn.hutool.poi.excel.ExcelReader;
import cn.hutool.poi.excel.ExcelUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.css.fxfzdzzh.base.attachment.entity.AttachmentInfoEntity;
import com.css.fxfzdzzh.base.attachment.repository.AttachmentInfoRepository;
import com.css.fxfzdzzh.base.attachment.service.AttachmentInfoService;
import com.css.fxfzdzzh.base.exception.ShpFieldException;
import com.css.fxfzdzzh.base.response.RestResponse;
import com.css.fxfzdzzh.constants.FxfzConstants;
import com.css.fxfzdzzh.constants.GeomTypeEnum;
import com.css.fxfzdzzh.modules.basicData.entity.SysDataVersion;
import com.css.fxfzdzzh.modules.basicData.repository.impl.SysDataVersionRepository;
import com.css.fxfzdzzh.modules.drill.service.DrillService;
import com.css.fxfzdzzh.constants.RedisKeyConstants;
import com.css.fxfzdzzh.constants.YNEnum;
import com.css.fxfzdzzh.enums.ResultEnum;
import com.css.fxfzdzzh.modules.basicData.service.impl.BasicLandformService;
import com.css.fxfzdzzh.modules.basicData.service.impl.LithologyService;
import com.css.fxfzdzzh.modules.eqLandslide.count.dto.HazardousAreaDto;
import com.css.fxfzdzzh.modules.eqLandslide.count.entity.*;
import com.css.fxfzdzzh.modules.eqLandslide.count.param.LargeScreenParam;
import com.css.fxfzdzzh.modules.eqLandslide.count.service.EqLandslideService;
import com.css.fxfzdzzh.modules.eqLandslide.count.param.TaskModelParam;
import com.css.fxfzdzzh.modules.eqLandslide.count.repository.EqLandslideRepository;
import com.css.fxfzdzzh.modules.eqLandslide.count.vo.*;
import com.css.fxfzdzzh.modules.eqLandslide.lawAnalysis.repository.LawAnalysisRepository;
import com.css.fxfzdzzh.modules.evaluate.entity.CountAreaResult;
import com.css.fxfzdzzh.modules.grid.entity.DistrictBoundary;
import com.css.fxfzdzzh.modules.hypergraph.service.HypergraphService;
import com.css.fxfzdzzh.modules.taskManage.entity.TaskEntity;
import com.css.fxfzdzzh.modules.taskManage.service.TaskService;
import com.css.fxfzdzzh.modules.tifData.service.ElevationMapService;
import com.css.fxfzdzzh.util.*;
import com.css.fxfzdzzh.util.fileParsing.ParsingResult;
import com.css.fxfzdzzh.util.fileParsing.shp.ShpAnalyticUtil;
import com.css.fxfzdzzh.web.PlatformSessionContext;
import com.css.fxfzdzzh.zcpt.sys.entity.SUser;
import com.css.fxfzdzzh.zcpt.sys.service.SUserService;
import com.css.fxfzdzzh.zcpt.sys.service.SysAreaService;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
import com.google.gson.Gson;
import com.google.gson.internal.LinkedTreeMap;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.http.*;
import org.springframework.jdbc.core.BeanPropertyRowMapper;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import org.springframework.jdbc.core.namedparam.SqlParameterSource;
import org.springframework.jdbc.core.namedparam.SqlParameterSourceUtils;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.interceptor.TransactionAspectSupport;
import org.springframework.util.ResourceUtils;
import org.springframework.web.client.RestClientException;
import org.springframework.web.client.RestTemplate;
import org.springframework.web.multipart.MultipartFile;

import javax.annotation.Resource;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.net.URLEncoder;
import java.sql.Timestamp;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

/**
 * Earthquake-landslide (eqLandslide) counting service implementation: runs the
 * susceptibility-model computations (e.g. the analytic hierarchy process model)
 * for the fxfzdzzh platform.
 *
 * @Description: fxfzdzzh
 * @Author: lhl
 * @CreateDate: 2023/2/15 11:23
 */
@Service
public class EqLandslideServiceImpl implements EqLandslideService {

    // Persistence for eq-landslide task models, AHP results and per-task climate rows.
    @Resource
    EqLandslideRepository eqLandslideRepository;
    // Task lookup (geometry, administrative district, task number).
    @Resource
    TaskService taskService;
    @Resource
    SUserService sUserService;
    // Resolves administrative-division ids by province/city/county names.
    @Resource
    SysAreaService sysAreaService;
    @Resource
    LawAnalysisRepository lawAnalysisRepository;
    @Resource
    SysDataVersionRepository sysDataVersionRepository;
    // Fetches monitoring data (e.g. climate records) from the data-center service.
    @Resource
    DrillService drillService;
    // Base URL of the basic-data service; used to build data-center request URLs.
    @Value("${basicData.url}")
    public String basicDataUrl;
    // Local directory used for file storage (from property file.localDir).
    @Value("${file.localDir}")
    public String fileUrl;
    @Resource
    LithologyService lithologyService;
    // Used both as a short-lived per-task computation lock and as a generic cache.
    @Resource
    RedisTemplate redisTemplate;
    @Resource
    private BasicLandformService basicLandformService;
    @Resource
    ElevationMapService elevationMapService;
    @Resource
    AttachmentInfoService attachmentInfoService;
    @Resource
    RestTemplate restTemplate;
    // NOTE(review): a second, String-typed RedisTemplate is injected alongside the
    // raw-typed one above — consider consolidating; verify no code relies on both beans.
    @Resource
    private RedisTemplate<String, String> redisTemplate1;

    // GIS (SuperMap / hypergraph) gateway used for vector queries, buffering and
    // vector-to-raster conversion.
    @Resource
    HypergraphService hypergraphService;
    @Resource
    JdbcTemplate jdbcTemplate;
    @Resource
    NamedParameterJdbcTemplate namedParameterJdbcTemplate;
    @Autowired
    private RestTemplateUtil restTemplateUtil;
    @Resource
    AttachmentInfoRepository attachmentInfoRepository;


    /**
     * Model 1: analytic-hierarchy-process (AHP) susceptibility computation.
     * <p>
     * Validates the user-supplied judgement matrices (consistency check), derives the
     * normalized factor weights, rasterises each selected factor layer (lithology,
     * faults, water systems, climate, roads, peak ground acceleration) through the
     * hypergraph (SuperMap) service, runs the slope-based overlay computation and
     * persists one result record per exceedance-probability layer.
     *
     * @param entity model parameters: task id, factor selections ({@code factor1},
     *               {@code factor2}), judgement matrices and data-version codes
     * @return a success response carrying the result layer names, or a failure
     *         response describing why the computation could not run
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public RestResponse count1(DzzhModelCcfx entity) {
        RestResponse restResponse = null;
        String taskId = entity.getTaskId();
        DzzhTaskModel dzzhTaskModel = eqLandslideRepository.getDzzhTaskModelByTaskIdAndResultStatus(taskId, "2");
        try {
            Map<String, String> areaIdCodeMap = (Map<String, String>) JSON.parse((String) redisTemplate.opsForValue().get(FxfzConstants.CACHE_SYS_KEY + "areaIdCodeMap"));

            TaskEntity taskEntity = taskService.viewTask(taskId);
            String geom = taskEntity.getGeom();
            // A redis key per task id acts as a short-lived (10 s) lock against concurrent runs.
            if (PlatformObjectUtils.isNotEmpty(redisTemplate.opsForValue().get(taskId))) {
                return RestResponse.fail("该线程中的该数据:{}" + taskEntity.getTaskName() + "正在被计算！");
            }
            redisTemplate.opsForValue().set(taskId, "2", 10L, TimeUnit.SECONDS);
            String dtmId = UUIDGenerator.getUUID();
            entity.setDtmId(dtmId + ",");
            String modelId = saveDzzhModelCcfx(entity);

            String factor1 = entity.getFactor1();
            String factor2 = entity.getFactor2();
            // Build the judgement matrices submitted with the request.
            Map<String, BigDecimal[][]> bigDecimals = getBigDecimals(entity);
            Map<String, Map<String, BigDecimal>> mapMap = new HashMap<>();
            // Consistency check; getFactor returns the normalized weights plus a
            // "status" entry (0 = failed the consistency test) for larger matrices.
            for (String s : bigDecimals.keySet()) {
                if (s.contains("a")) {
                    mapMap.put("a", getFactor(s, bigDecimals.get(s)));
                } else if (s.contains("b")) {
                    BigDecimal[][] bigDecimals1 = bigDecimals.get(s);
                    if (bigDecimals1.length == 1) {
                        mapMap.put("b", getFactor(s + factor1, bigDecimals.get(s)));
                    } else {
                        Map<String, BigDecimal> factor = getFactor(s, bigDecimals.get(s));
                        mapMap.put("b", factor);
                        // Matrices of order <= 2 are always consistent; only larger ones are checked.
                        if (bigDecimals1.length > 2) {
                            if (BigDecimal.ZERO.compareTo(factor.get("status")) == 0) {
                                return RestResponse.fail("您输入的判断矩阵不符合一致性检验要求，请重新调整。");
                            }
                        }
                    }
                } else if (s.contains("c")) {
                    BigDecimal[][] bigDecimals1 = bigDecimals.get(s);
                    if (bigDecimals1.length == 1) {
                        mapMap.put("c", getFactor(s + factor2, bigDecimals.get(s)));
                    } else {
                        Map<String, BigDecimal> factor = getFactor(s, bigDecimals.get(s));
                        mapMap.put("c", factor);
                        if (bigDecimals1.length > 2) {
                            if (BigDecimal.ZERO.compareTo(factor.get("status")) == 0) {
                                return RestResponse.fail("您输入的判断矩阵不符合一致性检验要求，请重新调整。");
                            }
                        }
                    }
                }
            }

            // Combine the level-B and level-C weights with the top-level (A) weights.
            Map<String, BigDecimal> factorMap = new HashMap<>();
            Map<String, BigDecimal> a = mapMap.get("a");
            Map<String, BigDecimal> b = mapMap.get("b");
            Map<String, BigDecimal> c = mapMap.get("c");
            for (String s1 : b.keySet()) {
                factorMap.put(s1, b.get(s1).multiply(a.get("a1")).setScale(8, RoundingMode.HALF_UP));
            }
            for (String s2 : c.keySet()) {
                factorMap.put(s2, c.get(s2).multiply(a.get("a2")).setScale(8, RoundingMode.HALF_UP));
            }

            // Export the task polygon from the database into a temporary clip layer
            // on the GIS server; all factor layers below are clipped against it.
            String currTime = System.currentTimeMillis() + "";
            String clipLayerName = "temp_dzzh任务_" + currTime;
            String sqlFilter = "dt_id='" + taskId + "'";
            Map<String, Object> map0 = new HashMap<>();
            map0.put("type", "dzzhVectorQuery");
            map0.put("layerName", "dzzh_task");
            map0.put("resultLayerName", clipLayerName);
            map0.put("sqlFilter", sqlFilter);
            map0.put("queryType", "fxfzdzzh");
            RestResponse serverToken0 = hypergraphService.getServerToken(map0);
            if (serverToken0.getCode() != 200) {
                return RestResponse.fail(503, "计算失败！");
            }

            // Factor: stratigraphic lithology.
            JSONObject jsonObject2 = new JSONObject();
            if (factor1.contains("2")) {
                // Drop merge results belonging to other versions first. When the selected
                // lithology version is used this deletes nothing (the merge step already
                // cleaned up); it only matters for directly imported merge results.
                eqLandslideRepository.deleteByVersion(taskId, entity.getLithologyVersion(), "2");
                RestResponse serverToken2 = vectorTif("vectorTif-1", "dzzh_lithology_merge_result", clipLayerName,
                        "values1", "lithology_version_code='" + entity.getLithologyVersion() + "' and task_id='" + taskId + "' and del_flag='0'"
                        , "task_num='" + taskEntity.getTaskNum() + "' and del_flag='0'", currTime);
                if (serverToken2.getCode() != 200) {
                    return RestResponse.fail("该区域没有" + entity.getLithologyVersion() + "版本的地层岩性数据");
                }
                String resultLayerName2 = JSON.parseObject(JSON.toJSONString(serverToken2.getData()))
                        .getString("resultLayerName");
                jsonObject2.put("resultLayerName2", resultLayerName2);
            }
            // Factor: faults (buffered, then rasterised).
            JSONObject jsonObject3 = new JSONObject();
            if (factor1.contains("3")) {
                Map<String, Object> requestMap = new HashMap<>();
                requestMap.put("type", "dzzhVectorBufferTif");
                requestMap.put("layerName", clipLayerName);
                requestMap.put("queryType", "断层");
                requestMap.put("timeStamp", currTime);

                RestResponse serverToken = hypergraphService.getServerToken(requestMap);
                if (serverToken.getCode() == 200) {
                    String resultLayerName3 = JSON.parseObject(JSON.toJSONString(serverToken.getData()))
                            .getString("resultLayerName");
                    jsonObject3.put("resultLayerName3", resultLayerName3);
                }
            }
            // Factor: water systems.
            JSONObject jsonObject4 = new JSONObject();
            if (factor1.contains("4")) {
                Map<String, Object> requestMap = new HashMap<>();
                requestMap.put("type", "dzzhVectorBufferTif");
                requestMap.put("layerName", clipLayerName);
                requestMap.put("queryType", "水系");
                requestMap.put("timeStamp", currTime);

                RestResponse serverToken = hypergraphService.getServerToken(requestMap);
                if (serverToken.getCode() == 200) {
                    String resultLayerName4 = JSON.parseObject(JSON.toJSONString(serverToken.getData()))
                            .getString("resultLayerName");
                    jsonObject4.put("resultLayerName3", resultLayerName4);
                }
            }
            // Factor: climate (average precipitation per administrative district).
            JSONObject jsonObject5 = new JSONObject();
            if (factor2.contains("1")) {
                eqLandslideRepository.delDzzhJcClimate(taskId);
                String baseUrl = basicDataUrl + "/datacenter/jcClimate/getByArea?1=1";
                for (String s1 : taskEntity.getTaskDistrict().split("、")) {
                    if (s1 != null) {
                        String[] s2 = s1.split("-");
                        // FIX: build the query URL per district. The original mutated one
                        // shared variable, so every request after the first carried the
                        // query parameters of all previously visited districts as well.
                        String url = baseUrl;
                        String divisionIds = null;
                        try {
                            if (s2.length == 1) {
                                url = url + "&province=" + s2[0] + "&city=&county=";
                                divisionIds = sysAreaService.getIdByCondition(URLEncoder.encode(s2[0], "UTF-8"),
                                        null, null);
                            } else if (s2.length == 2 || s2.length == 3) {
                                url = url + "&province=" + s2[0] + "&city=" + s2[1] + "&county=";
                                if (s2.length == 2) {
                                    divisionIds = sysAreaService.getIdByCondition(URLEncoder.encode(s2[0], "UTF-8"),
                                            URLEncoder.encode(s2[1], "UTF-8"), null);
                                } else if (s2.length == 3) {
                                    divisionIds = sysAreaService.getIdByCondition(URLEncoder.encode(s2[0], "UTF-8"),
                                            URLEncoder.encode(s2[1], "UTF-8"), URLEncoder.encode(s2[2], "UTF-8"));
                                }
                            }
                        } catch (UnsupportedEncodingException e) {
                            // UTF-8 is mandated by the JVM spec, so this branch is unreachable in practice.
                            e.printStackTrace();
                        }
                        if (divisionIds == null) {
                            return null;
                        }
                        // Resolve the district geometry at the matching administrative level.
                        String geomText = null;
                        String[] split = divisionIds.split("_");
                        if (split.length == 1) {
                            geomText = getGeomByCode(areaIdCodeMap.get(split[0]), "1");
                        } else if (split.length == 2) {
                            geomText = getGeomByCode(areaIdCodeMap.get(split[1]), "2");
                        } else {
                            geomText = getGeomByCode(areaIdCodeMap.get(split[2]), "3");
                        }
                        if (PlatformObjectUtils.isEmpty(geomText)) {
                            // FIX: String[].toString() only prints the array identity
                            // ("[Ljava.lang.String;@..."); join the segments so the
                            // message actually names the district.
                            return RestResponse.fail("未获取到气候数据的" + String.join("-", s2) + "的空间数据，无法进行计算！");
                        }
                        // Sum the average precipitation reported for the district.
                        JSONArray jsonArray = (JSONArray) drillService.getJCSJ(url, 3).getData();
                        BigDecimal jyl = BigDecimal.ZERO;
                        for (int i = 0; i < jsonArray.size(); i++) {
                            JSONObject jsonObject = jsonArray.getJSONObject(i);
                            jyl = jyl.add(jsonObject.getBigDecimal("avPrec"));
                        }
                        DzzhJcClimate dzzhJcClimate = new DzzhJcClimate();
                        dzzhJcClimate.setId(UUIDGenerator.getUUID());
                        dzzhJcClimate.setAvPrec(jyl);
                        dzzhJcClimate.setGeomText(geomText);
                        dzzhJcClimate.setTaskId(taskId);
                        // Bin the precipitation total into the model's normalized score.
                        BigDecimal values1 = null;
                        if (jyl.compareTo(BigDecimal.valueOf(1600)) > -1) {
                            values1 = BigDecimal.valueOf(1);
                        } else if (jyl.compareTo(BigDecimal.valueOf(1200)) > -1 && jyl.compareTo(BigDecimal.valueOf(1600)) < 0) {
                            values1 = BigDecimal.valueOf(0.67);
                        } else if (jyl.compareTo(BigDecimal.valueOf(800)) > -1 && jyl.compareTo(BigDecimal.valueOf(1200)) < 0) {
                            values1 = BigDecimal.valueOf(0.33);
                        } else if (jyl.compareTo(BigDecimal.valueOf(400)) > -1 && jyl.compareTo(BigDecimal.valueOf(800)) < 0) {
                            values1 = BigDecimal.valueOf(0.11);
                        } else if (jyl.compareTo(BigDecimal.valueOf(400)) < 0) {
                            values1 = BigDecimal.valueOf(0);
                        }
                        dzzhJcClimate.setValues1(values1);
                        eqLandslideRepository.saveDzzhJcClimate(dzzhJcClimate);
                    }
                }
                // Refresh the GIS workspace so the freshly saved climate rows are visible
                // (return value was never used; the unused local `b2` is removed).
                hypergraphService.getWorkspaceReload("dzzh_jc_climate");
                RestResponse serverToken5 = vectorTif("vectorTif-1", "dzzh_jc_climate", clipLayerName,
                        "values1", "task_id='" + taskId + "'"
                        , "task_num='" + taskEntity.getTaskNum() + "' and del_flag='0'", currTime);
                if (serverToken5.getCode() != 200) {
                    return RestResponse.fail("该区域没有气候数据");
                }
                String resultLayerName5 = JSON.parseObject(JSON.toJSONString(serverToken5.getData()))
                        .getString("resultLayerName");
                jsonObject5.put("resultLayerName5", resultLayerName5);
            }
            // Factor: roads.
            JSONObject jsonObject6 = new JSONObject();
            if (factor2.contains("2")) {
                Map<String, Object> requestMap = new HashMap<>();
                requestMap.put("type", "dzzhVectorBufferTif");
                requestMap.put("layerName", clipLayerName);
                requestMap.put("queryType", "道路");
                requestMap.put("timeStamp", currTime);

                RestResponse serverToken = hypergraphService.getServerToken(requestMap);
                if (serverToken.getCode() == 200) {
                    String resultLayerName6 = JSON.parseObject(JSON.toJSONString(serverToken.getData()))
                            .getString("resultLayerName");
                    // FIX: the road layer must be stored in jsonObject6; the original wrote
                    // into jsonObject4, clobbering the water-system entry and leaving the
                    // road object empty. The key stays "resultLayerName3" to match the
                    // pattern the downstream computation reads for buffered layers.
                    jsonObject6.put("resultLayerName3", resultLayerName6);
                }
            }
            // Factor: peak ground acceleration (2% / 10% / 63% exceedance in 50 years).
            Map<String, JSONObject> mapMap7 = new HashMap<>();
            if (factor2.contains("3")) {
                if (PlatformObjectUtils.isNotEmpty(entity.getA1Version())) {
                    RestResponse serverToken7 = vectorTif("vectorTif-2", "jc_pga_002", clipLayerName,
                            "year50_pro_exceedance002", "data_version='" + entity.getA1Version() + "' and del_flag='0'"
                            , "task_num='" + taskEntity.getTaskNum() + "' and del_flag='0'", currTime);
                    if (serverToken7.getCode() != 200) {
                        return RestResponse.fail("该区域没有" + entity.getA1Version() + "版本的地震峰值加速度数据");
                    }
                    JSONObject jsonObject7 = new JSONObject();
                    String resultLayerName7 = JSON.parseObject(JSON.toJSONString(serverToken7.getData()))
                            .getString("resultLayerName");
                    jsonObject7.put("resultLayerName", resultLayerName7);
                    mapMap7.put("1", jsonObject7);
                }
                if (PlatformObjectUtils.isNotEmpty(entity.getA2Version())) {
                    RestResponse serverToken8 = vectorTif("vectorTif-2", "jc_pga_010", clipLayerName,
                            "year50_pro_exceedance_010", "data_version='" + entity.getA2Version() + "' and del_flag='0'"
                            , "task_num='" + taskEntity.getTaskNum() + "' and del_flag='0'", currTime);
                    if (serverToken8.getCode() != 200) {
                        return RestResponse.fail("该区域没有" + entity.getA2Version() + "版本的地震峰值加速度数据");
                    }
                    JSONObject jsonObject8 = new JSONObject();
                    String resultLayerName8 = JSON.parseObject(JSON.toJSONString(serverToken8.getData()))
                            .getString("resultLayerName");
                    jsonObject8.put("resultLayerName", resultLayerName8);
                    mapMap7.put("2", jsonObject8);
                }
                if (PlatformObjectUtils.isNotEmpty(entity.getA3Version())) {
                    RestResponse serverToken9 = vectorTif("vectorTif-2", "jc_pga_063", clipLayerName,
                            "year50_pro_exceedance_063", "data_version='" + entity.getA3Version() + "' and del_flag='0'"
                            , "task_num='" + taskEntity.getTaskNum() + "' and del_flag='0'", currTime);
                    if (serverToken9.getCode() != 200) {
                        return RestResponse.fail("该区域没有" + entity.getA3Version() + "版本的地震峰值加速度数据");
                    }
                    JSONObject jsonObject9 = new JSONObject();
                    String resultLayerName9 = JSON.parseObject(JSON.toJSONString(serverToken9.getData()))
                            .getString("resultLayerName");
                    jsonObject9.put("resultLayerName", resultLayerName9);
                    mapMap7.put("3", jsonObject9);
                }
            }
            // Hand everything to the slope-version overlay computation.
            Map<String, Object> map = new HashMap<>();
            map.put("entity", entity);
            map.put("factorMap", factorMap);
            map.put("factor1", factor1);
            map.put("jsonObject2", jsonObject2);
            map.put("jsonObject3", jsonObject3);
            map.put("jsonObject4", jsonObject4);
            map.put("jsonObject5", jsonObject5);
            map.put("jsonObject6", jsonObject6);
            map.put("mapMap7", mapMap7);
            map.put("currTime", currTime);
            map.put("clipLayerName", clipLayerName);
            restResponse = getDataBySlopeVersion1(geom, entity.getSlopeVersion(), taskId,
                    dtmId, "1", map);
            if (restResponse.getCode() != 200) {
                return restResponse;
            }
            // Create or refresh the task-model bookkeeping row.
            DzzhTaskModel dzzhTaskModel2 = eqLandslideRepository.getDzzhTaskModelByDtmId(dtmId);
            if (PlatformObjectUtils.isEmpty(dzzhTaskModel2)) {
                saveDzzhTaskModel(taskId, dtmId, "1", entity.getAttachId(), entity.getLithology());
            } else {
                dzzhTaskModel2.setEvaluateStatus("2");
                dzzhTaskModel2.setResultStatus("2");
                dzzhTaskModel2.setUpdateUser(PlatformSessionUtils.getUserId());
                dzzhTaskModel2.setUpdateTime(PlatformDateUtils.getCurrentTimestamp());
                dzzhTaskModel2.setModelType("1");
                // Only overwrite when new values were supplied (the original's else
                // branches re-assigned each field to itself — removed as no-ops).
                if (StringUtils.isNotBlank(entity.getAttachId())) {
                    dzzhTaskModel2.setAttachId(entity.getAttachId());
                }
                if (StringUtils.isNotBlank(entity.getLithology())) {
                    dzzhTaskModel2.setLithology(entity.getLithology());
                }
                eqLandslideRepository.updateDzzhTaskModel1(dzzhTaskModel2);
            }

            // Persist one result row per exceedance-probability layer returned.
            JSONArray jsonArray = JSON.parseObject(restResponse.getData().toString()).getJSONArray("resultLayerName");
            for (int i = 0; i < jsonArray.size(); i++) {
                String fileName = jsonArray.getString(i);
                DzzhResultCcfx ccfx = new DzzhResultCcfx();
                ccfx.setId(UUIDGenerator.getUUID());
                ccfx.setProExceed("a" + String.valueOf(i + 1));
                ccfx.setFilePath(fileName);
                ccfx.setTaskId(taskId);
                ccfx.setModelId(modelId);
                eqLandslideRepository.saveDzzhResultCcfx(ccfx);
            }

            restResponse.setMessage("计算成功！");
            return restResponse;
        } catch (Exception e) {
            e.printStackTrace();
            // FIX: the exception is swallowed here, so the @Transactional proxy never
            // sees it and rollbackFor never fires, leaving partial writes committed.
            // Mark the transaction rollback-only explicitly (same pattern as below).
            TransactionAspectSupport.currentTransactionStatus().setRollbackOnly();
        } finally {
            if (restResponse != null) {
                if (restResponse.getCode() == 200) {
                    delOldAndSaveNew(dzzhTaskModel, taskId);
                    redisTemplate.delete(taskId);
                } else {
                    // Manual rollback: a non-200 result is returned normally, which
                    // would otherwise commit the transaction.
                    TransactionAspectSupport.currentTransactionStatus().setRollbackOnly();
                }
            }

        }
        return RestResponse.fail("计算失败！");
    }

    /*//1层次分析法模型
    @Transactional(rollbackFor = Exception.class)
    public RestResponse oldCount1(DzzhModelCcfx entity) {
        RestResponse restResponse = null;
        String taskId = entity.getTaskId();
        DzzhTaskModel dzzhTaskModel = eqLandslideRepository.getDzzhTaskModelByTaskIdAndResultStatus(taskId, "2");
        try {
            Map<String, String> areaIdCodeMap = (Map<String, String>) JSON.parse((String) redisTemplate.opsForValue().get(FxfzConstants.CACHE_SYS_KEY + "areaIdCodeMap"));

            TaskEntity taskEntity = taskService.viewTask(taskId);
            String geom = taskEntity.getGeom();
            if (PlatformObjectUtils.isNotEmpty(redisTemplate.opsForValue().get(taskId))) {
                return RestResponse.fail("该线程中的该数据:{}" + taskEntity.getTaskName() + "正在被计算！");
            }
            redisTemplate.opsForValue().set(taskId, "2", 10L, TimeUnit.SECONDS);
            String dtmId = UUIDGenerator.getUUID();
            entity.setDtmId(dtmId + ",");
            String modelId = saveDzzhModelCcfx(entity);


            String factor1 = entity.getFactor1();
            String factor2 = entity.getFactor2();
            //获取矩阵
            Map<String, BigDecimal[][]> bigDecimals = getBigDecimals(entity);
            Map<String, Map<String, BigDecimal>> mapMap = new HashMap<>();
            //一致性校验（返回值有问题，返回值应为 归一性权重 ）
            for (String s : bigDecimals.keySet()) {
                // Map<String, BigDecimal> factor = getFactor(bigDecimals.get(s));
                if (s.contains("a")) {
                    mapMap.put("a", getFactor(s, bigDecimals.get(s)));
                } else if (s.contains("b")) {
                    BigDecimal[][] bigDecimals1 = bigDecimals.get(s);
                    if (bigDecimals1.length == 1) {
                        mapMap.put("b", getFactor(s + factor1, bigDecimals.get(s)));
                    } else {
                        Map<String, BigDecimal> factor = getFactor(s, bigDecimals.get(s));
                        mapMap.put("b", factor);
                        if (bigDecimals1.length>2){
                            if (BigDecimal.ZERO.compareTo(factor.get("status"))==0) {
                                // throw new RuntimeException("您输入的判断矩阵不符合一致性检验要求，请重新调整。");
                                return RestResponse.fail("您输入的判断矩阵不符合一致性检验要求，请重新调整。");
                            }
                        }
                    }
                } else if (s.contains("c")) {
                    BigDecimal[][] bigDecimals1 = bigDecimals.get(s);
                    if (bigDecimals1.length == 1) {
                        mapMap.put("c", getFactor(s + factor2, bigDecimals.get(s)));
                    } else {
                        Map<String, BigDecimal> factor = getFactor(s, bigDecimals.get(s));
                        mapMap.put("c", factor);
                        if (bigDecimals1.length>2){
                            if (BigDecimal.ZERO.compareTo(factor.get("status"))==0) {
                                // throw new RuntimeException("您输入的判断矩阵不符合一致性检验要求，请重新调整。");
                                return RestResponse.fail("您输入的判断矩阵不符合一致性检验要求，请重新调整。");
                            }
                        }
                    }
                }
            }

            Map<String, BigDecimal> factorMap = new HashMap<>();
            Map<String, BigDecimal> a = mapMap.get("a");
            Map<String, BigDecimal> b = mapMap.get("b");
            Map<String, BigDecimal> c = mapMap.get("c");
            for (String s1 : b.keySet()) {
                factorMap.put(s1, b.get(s1).multiply(a.get("a1")).setScale(8, RoundingMode.HALF_UP));
            }
            for (String s2 : c.keySet()) {
                factorMap.put(s2, c.get(s2).multiply(a.get("a2")).setScale(8, RoundingMode.HALF_UP));
            }

            //查询数据库矢量数据到dzzhTifCount--任务区域
            String currTime = System.currentTimeMillis() + "";
//            String clipLayerName = "dzzh_task";
            String clipLayerName = "temp_dzzh任务_" + currTime;
            String sqlFilter = "dt_id='"+taskId+"'";
            Map<String, Object> map0 = new HashMap<>();
            map0.put("type", "dzzhVectorQuery");
            map0.put("layerName", "dzzh_task");
            map0.put("resultLayerName", clipLayerName);
            map0.put("sqlFilter", sqlFilter);
            map0.put("queryType", "fxfzdzzh");
            RestResponse serverToken0 = hypergraphService.getServerToken(map0);
            if (serverToken0.getCode() != 200) {
                return RestResponse.fail(503, "计算失败！");
            }

            //地层岩性
            JSONObject jsonObject2 = new JSONObject();
            if (factor1.contains("2")) {
                //地层岩性
                RestResponse serverToken2 = vectorTif("vectorTif-1", "dzzh_lithology_merge_result", clipLayerName,
                        "values1", "lithology_version_code='" + entity.getLithologyVersion() + "' and task_id='" + taskId + "' and del_flag='0'"
                        , "task_num='" + taskEntity.getTaskNum() + "' and del_flag='0'",currTime);
                if (serverToken2.getCode() != 200) {
                    return RestResponse.fail("该区域没有" + entity.getLithologyVersion() + "版本的地层岩性数据");
                }
                String resultLayerName2 = JSON.parseObject(JSON.toJSONString(serverToken2.getData()))
                        .getString("resultLayerName");
                // String resultLayerName2 = "dzzh_lithology_merge_result_矢量转栅格_1698136084336";
                jsonObject2.put("resultLayerName2", resultLayerName2);

                Map<String, Object> map2_1 = new HashMap<>();
                map2_1.put("type", "maxMinTif");
                map2_1.put("layerName", resultLayerName2);//矢量转栅格的数据集
                RestResponse serverToken2_1 = hypergraphService.getServerToken(map2_1);
                if (serverToken2_1.getCode() != 200) {
                    return RestResponse.fail("该区域没有" + entity.getLithologyVersion() + "版本的地层岩性数据！！！");
                }
                BigDecimal min2 = JSON.parseObject(JSON.toJSONString(serverToken2_1.getData())).getBigDecimal("min");
                BigDecimal max2 = JSON.parseObject(JSON.toJSONString(serverToken2_1.getData())).getBigDecimal("max");
                jsonObject2.put("min2", min2);
                jsonObject2.put("max2", max2);
            }
            //断层
            JSONObject jsonObject3 = new JSONObject();
            if (factor1.contains("3")) {
                eqLandslideRepository.delDzzhCountBuffer1(taskId, "dzzh_count_buffer1_dc");
                JSONArray jsonArray1 = getUrl(entity.getHdcVersion(), geom);
                if (jsonArray1.size() > 0) {
                    //1,0-500;2,500-1000;3,1000-2000;4,2000-3000;5,>3000
                    Integer number1 = 500, number2 = 0;
                    for (int key = 1; key < 6; key++) {
                        switch (String.valueOf(key)) {
                            case "1":
                                number1 = 500;
                                number2 = 0;
                                break;
                            case "2":
                                number1 = 1000;
                                number2 = 500;
                                break;
                            case "3":
                                number1 = 2000;
                                number2 = 1000;
                                break;
                            case "4":
                                number1 = 3000;
                                number2 = 2000;
                                break;
                            case "5":
                                number1 = 3500;
                                number2 = 3000;
                                break;
                        }
                        BigDecimal param3 = getParam3(String.valueOf(key));
                        String sumGeom = "";//全部断层线空间数据
                        for (int i = 0; i < jsonArray1.size(); i++) {
                            if (PlatformObjectUtils.isNotEmpty(sumGeom)) {
                                sumGeom = lawAnalysisRepository.getStUnion(sumGeom, jsonArray1.getJSONObject(i).getString(
                                        "geom"));
                            } else {
                                sumGeom = jsonArray1.getJSONObject(i).getString(
                                        "geom");
                            }
                        }
                        if (PlatformObjectUtils.isEmpty(sumGeom)) {
                            continue;
                            // return RestResponse.fail("未获取到断层数据的空间数据，无法进行计算！");
                        }
                        String geoms = eqLandslideRepository.getByGeom2(geom, sumGeom, number1, number2);
                        if (PlatformObjectUtils.isEmpty(geoms)) {
                            // throw new RuntimeException("未获取到断层数据的空间数据，无法进行计算！");
                            return RestResponse.fail("未获取到断层数据的空间数据，无法进行计算！");
                        }
                        if (geoms.indexOf("MULTIPOLYGON") != 0) {
                            if (geoms.indexOf("POLYGON") == 0) {
                                geoms = geoms.replace("POLYGON", "MULTIPOLYGON(") + ")";
                            } else {
                                continue;
                            }
                        }
                        DzzhCountBuffer dzzhCountBuffer = new DzzhCountBuffer();
                        dzzhCountBuffer.setId(UUIDGenerator.getUUID());
                        dzzhCountBuffer.setValues1(param3);
                        dzzhCountBuffer.setGeomText(geoms);
                        dzzhCountBuffer.setTaskId(taskId);
                        eqLandslideRepository.saveDzzhCountBuffer1(dzzhCountBuffer,"dzzh_count_buffer1_dc");
                    }
                    //刷新指定超图工作空间数据源数据集
                    boolean b1 = hypergraphService.getWorkspaceReload("dzzh_count_buffer1_dc");
                    RestResponse serverToken3 = vectorTif("vectorTif-1", "dzzh_count_buffer1_dc", clipLayerName,
                            "values1", "task_id='" + taskId + "'"
                            , "task_num='" + taskEntity.getTaskNum() + "' and del_flag='0'",currTime);
                    if (serverToken3.getCode() != 200) {
                        // throw new RuntimeException("该区域没有断层数据");
                        return RestResponse.fail("该区域没有断层数据");
                    }
                    String resultLayerName3 = JSON.parseObject(JSON.toJSONString(serverToken3.getData()))
                            .getString("resultLayerName");
                    jsonObject3.put("resultLayerName3", resultLayerName3);

                    Map<String, Object> map3_1 = new HashMap<>();
                    map3_1.put("type", "maxMinTif");
                    map3_1.put("layerName", resultLayerName3);//矢量转栅格的数据集
                    RestResponse serverToken3_1 = hypergraphService.getServerToken(map3_1);
                    if (serverToken3_1.getCode() != 200) {
                        // throw new RuntimeException("该区域没有断层数据");
                        return RestResponse.fail("该区域没有断层数据！！！");
                    }
                    BigDecimal min3 = JSON.parseObject(JSON.toJSONString(serverToken3_1.getData())).getBigDecimal("min");
                    BigDecimal max3 = JSON.parseObject(JSON.toJSONString(serverToken3_1.getData())).getBigDecimal("max");
                    jsonObject3.put("min3", min3);
                    jsonObject3.put("max3", max3);
                }
            }
            //水系
            JSONObject jsonObject4 = new JSONObject();
            if (factor1.contains("4")) {
                eqLandslideRepository.delDzzhCountBuffer1(taskId, "dzzh_count_buffer1_sx");
//            String url2 = basicDataUrl + "/datacenter/jcHydl/findAll?geom=" + geom;
//            JSONArray jsonArray2 = (JSONArray) drillService.getJCSJ(url2, 3).getData();
                JSONObject valueMap = new JSONObject();
                valueMap.put("geom", geom);
                String url2 = basicDataUrl + "/datacenter/jcHydl/findAllUnion";
                JSONArray jsonArray2 = (JSONArray) drillService.getJCSJ(url2, 4, valueMap).getData();
                if (jsonArray2 == null || jsonArray2.size() < 1) {
                    // throw new RuntimeException("该区域没有水系数据");
                    return RestResponse.fail("该区域没有水系数据");
                }
                // 1,0-500;2,500-1000;3,1000-2000;4,2000-4000;5,>4000
                Integer number1 = 500, number2 = 0;
                for (int key = 1; key < 6; key++) {
                    switch (String.valueOf(key)) {
                        case "1":
                            number1 = 500;
                            number2 = 0;
                            break;
                        case "2":
                            number1 = 1000;
                            number2 = 500;
                            break;
                        case "3":
                            number1 = 2000;
                            number2 = 1000;
                            break;
                        case "4":
                            number1 = 4000;
                            number2 = 2000;
                            break;
                        case "5":
                            number1 = 4500;
                            number2 = 4000;
                            break;
                    }
                    BigDecimal param3 = getParam3(String.valueOf(key));
                    String sumGeom = "";
                    for (int i = 0; i < jsonArray2.size(); i++) {
                        if (PlatformObjectUtils.isNotEmpty(sumGeom)) {
                            sumGeom = lawAnalysisRepository.getStUnion(sumGeom, jsonArray2.getJSONObject(i).getString(
                                    "geom"));
                        } else {
                            sumGeom = jsonArray2.getJSONObject(i).getString("geom");
                        }
                    }
                    if (PlatformObjectUtils.isEmpty(sumGeom)) {
                        continue;
                        //return RestResponse.fail("未获取到水系数据的空间数据，无法进行计算！");
                    }
                    String geoms = eqLandslideRepository.getByGeom2(geom, sumGeom, number1, number2);
                    if (PlatformObjectUtils.isEmpty(geoms)) {
                        // throw new RuntimeException("未获取到水系数据的空间数据，无法进行计算！");
                        return RestResponse.fail("未获取到水系数据的空间数据，无法进行计算！");
                    }
                    if (geoms.indexOf("MULTIPOLYGON") != 0) {
                        if (geoms.indexOf("POLYGON") == 0) {
                            geoms = geoms.replace("POLYGON", "MULTIPOLYGON(") + ")";
                        } else {
                            continue;
                        }
                    }
                    DzzhCountBuffer dzzhCountBuffer = new DzzhCountBuffer();
                    dzzhCountBuffer.setId(UUIDGenerator.getUUID());
                    dzzhCountBuffer.setValues1(param3);
                    dzzhCountBuffer.setGeomText(geoms);
                    dzzhCountBuffer.setTaskId(taskId);
                    eqLandslideRepository.saveDzzhCountBuffer1(dzzhCountBuffer, "dzzh_count_buffer1_sx");
                }
                // 刷新指定超图工作空间数据源数据集
                boolean b2 = hypergraphService.getWorkspaceReload("dzzh_count_buffer1_sx");
                RestResponse serverToken4 = vectorTif("vectorTif-1", "dzzh_count_buffer1_sx", clipLayerName,
                        "values1", "task_id='" + taskId + "'"
                        , "task_num='" + taskEntity.getTaskNum() + "' and del_flag='0'",currTime);
                if (serverToken4.getCode() != 200) {
                    // throw new RuntimeException("该区域没有水系数据！");
                    return RestResponse.fail("该区域没有水系数据");
                }
                String resultLayerName4 = JSON.parseObject(JSON.toJSONString(serverToken4.getData()))
                        .getString("resultLayerName");
                // String resultLayerName4 = "dzzh_count_buffer_矢量转栅格_1698218049104";
                jsonObject4.put("resultLayerName4", resultLayerName4);

                Map<String, Object> map4_1 = new HashMap<>();
                map4_1.put("type", "maxMinTif");
                map4_1.put("layerName", resultLayerName4);//矢量转栅格的数据集
                RestResponse serverToken4_1 = hypergraphService.getServerToken(map4_1);
                if (serverToken4_1.getCode() != 200) {
                    // throw new RuntimeException("该区域没有水系数据！");
                    return RestResponse.fail("该区域没有水系数据！！！");
                }
                BigDecimal min4 = JSON.parseObject(JSON.toJSONString(serverToken4_1.getData())).getBigDecimal("min");
                BigDecimal max4 = JSON.parseObject(JSON.toJSONString(serverToken4_1.getData())).getBigDecimal("max");
                jsonObject4.put("min4", min4);
                jsonObject4.put("max4", max4);
            }
            //气候---------
            JSONObject jsonObject5 = new JSONObject();
            if (factor2.contains("1")) {
                eqLandslideRepository.delDzzhJcClimate(taskId);
                String url = basicDataUrl + "/datacenter/jcClimate/getByArea?1=1";
                for (String s1 : taskEntity.getTaskDistrict().split("、")) {
                    if (s1 != null) {
                        String[] s2 = s1.split("-");
                        String divisionIds = null;
                        try {
                            if (s2.length == 1) {
                                url = url + "&province=" + s2[0] + "&city=&county=";
                                divisionIds = sysAreaService.getIdByCondition(URLEncoder.encode(s2[0], "UTF-8"),
                                        null, null);
                            } else if (s2.length == 2 || s2.length == 3) {
                                url = url + "&province=" + s2[0] + "&city=" + s2[1] + "&county=";
                                if (s2.length == 2) {
                                    divisionIds = sysAreaService.getIdByCondition(URLEncoder.encode(s2[0], "UTF-8"),
                                            URLEncoder.encode(s2[1], "UTF-8"), null);
                                } else if (s2.length == 3) {
                                    divisionIds = sysAreaService.getIdByCondition(URLEncoder.encode(s2[0], "UTF-8"),
                                            URLEncoder.encode(s2[1], "UTF-8"), URLEncoder.encode(s2[2], "UTF-8"));
                                }
                            }*//* else if (s2.length == 3) {
                        url = url + "&province=" + s2[0] + "&city=" + s2[1] + "&county=";
                        geomText = taskService.queryGeomTextByProvinceCityCountyName(s2[0], s2[1], s2[2]);
                    }*//*
                        } catch (UnsupportedEncodingException e) {
                            e.printStackTrace();
                        }
                        if (divisionIds == null) {
                            return null;
                        }
                        String geomText = null;
                        String[] split = divisionIds.split("_");
                        if (split.length == 1) {
                            geomText = getGeomByCode(areaIdCodeMap.get(split[0]), "1");
                        } else if (split.length == 2) {
                            geomText = getGeomByCode(areaIdCodeMap.get(split[1]), "2");
                        } else {
                            geomText = getGeomByCode(areaIdCodeMap.get(split[2]), "3");
                        }
                        if (PlatformObjectUtils.isEmpty(geomText)) {
                            // throw new RuntimeException("未获取到气候数据的" + s2.toString() + "的空间数据，无法进行计算！");
                            return RestResponse.fail("未获取到气候数据的" + s2.toString() + "的空间数据，无法进行计算！");
                        }
                        JSONArray jsonArray = (JSONArray) drillService.getJCSJ(url, 3).getData();
                        BigDecimal jyl = BigDecimal.ZERO;
                        for (int i = 0; i < jsonArray.size(); i++) {
                            JSONObject jsonObject = jsonArray.getJSONObject(i);
                            jyl = jyl.add(jsonObject.getBigDecimal("avPrec"));
                        }
                        DzzhJcClimate dzzhJcClimate = new DzzhJcClimate();
                        dzzhJcClimate.setId(UUIDGenerator.getUUID());
                        dzzhJcClimate.setAvPrec(jyl);
                        dzzhJcClimate.setGeomText(geomText);
                        dzzhJcClimate.setTaskId(taskId);
                        BigDecimal values1 = null;
                        if (jyl.compareTo(BigDecimal.valueOf(1600)) > -1) {
                            values1 = BigDecimal.valueOf(1);
                        } else if (jyl.compareTo(BigDecimal.valueOf(1200)) > -1 && jyl.compareTo(BigDecimal.valueOf(1600)) < 0) {
                            values1 = BigDecimal.valueOf(0.67);
                        } else if (jyl.compareTo(BigDecimal.valueOf(800)) > -1 && jyl.compareTo(BigDecimal.valueOf(1200)) < 0) {
                            values1 = BigDecimal.valueOf(0.33);
                        } else if (jyl.compareTo(BigDecimal.valueOf(400)) > -1 && jyl.compareTo(BigDecimal.valueOf(800)) < 0) {
                            values1 = BigDecimal.valueOf(0.11);
                        } else if (jyl.compareTo(BigDecimal.valueOf(400)) < 0) {
                            values1 = BigDecimal.valueOf(0);
                        }
                        eqLandslideRepository.saveDzzhJcClimate(dzzhJcClimate);
                    }
                }
                // 刷新指定超图工作空间数据源数据集
                boolean b2 = hypergraphService.getWorkspaceReload("dzzh_jc_climate");
                RestResponse serverToken5 = vectorTif("vectorTif-1", "dzzh_jc_climate", clipLayerName,
                        "values1", "task_id='" + taskId + "'"
                        , "task_num='" + taskEntity.getTaskNum() + "' and del_flag='0'",currTime);
                if (serverToken5.getCode() != 200) {
                    // throw new RuntimeException("该区域没有气候数据！");
                    return RestResponse.fail("该区域没有气候数据");
                }
                String resultLayerName5 = JSON.parseObject(JSON.toJSONString(serverToken5.getData()))
                        .getString("resultLayerName");
                // String resultLayerName5="dzzh_jc_climate_矢量转栅格_1698220297074";
                jsonObject5.put("resultLayerName5", resultLayerName5);

                Map<String, Object> map5_1 = new HashMap<>();
                map5_1.put("type", "maxMinTif");
                map5_1.put("layerName", resultLayerName5);//矢量转栅格的数据集
                RestResponse serverToken5_1 = hypergraphService.getServerToken(map5_1);
                if (serverToken5_1.getCode() != 200) {
                    // throw new RuntimeException("该区域没有气候数据！");
                    return RestResponse.fail("该区域没有气候数据！！！");
                }
                BigDecimal min5 = JSON.parseObject(JSON.toJSONString(serverToken5_1.getData())).getBigDecimal("min");
                BigDecimal max5 = JSON.parseObject(JSON.toJSONString(serverToken5_1.getData())).getBigDecimal("max");
                jsonObject5.put("min5", min5);
                jsonObject5.put("max5", max5);
            }
            //道路
            JSONObject jsonObject6 = new JSONObject();
            if (factor2.contains("2")) {
                eqLandslideRepository.delDzzhCountBuffer1(taskId, "dzzh_count_buffer1_dl");
                JSONArray jsonArray6 = getUrl(entity.getRoadVersion(), geom);
                if (jsonArray6.size() < 1) {
                    // throw new RuntimeException("该区域没有" + entity.getRoadVersion() + "版本的道路数据");
                    return RestResponse.fail("该区域没有" + entity.getRoadVersion() + "版本的道路数据");
                }
                //1,<500;2,500-1000;3,1000-1500;4,1500-2000;5,>=2000
                Integer number1 = 500, number2 = 0;
                for (int key = 1; key < 6; key++) {
                    switch (String.valueOf(key)) {
                        case "1":
                            number1 = 500;
                            number2 = 0;
                            break;
                        case "2":
                            number1 = 1000;
                            number2 = 500;
                            break;
                        case "3":
                            number1 = 2000;
                            number2 = 1000;
                            break;
                        case "4":
                            number1 = 4000;
                            number2 = 2000;
                            break;
                        case "5":
                            number1 = 4500;
                            number2 = 4000;
                            break;
                    }
                    BigDecimal param3 = getParam3(String.valueOf(key));
                    String sumGeom = "";//全部断层线空间数据
                    for (int i = 0; i < jsonArray6.size(); i++) {
                        if (PlatformObjectUtils.isNotEmpty(sumGeom)) {
                            sumGeom = lawAnalysisRepository.getStUnion(sumGeom, jsonArray6.getJSONObject(i).getString(
                                    "geom"));
                        } else {
                            sumGeom = jsonArray6.getJSONObject(i).getString(
                                    "geom");
                        }
                    }
                    if (PlatformObjectUtils.isEmpty(sumGeom)) {
                        continue;
                    }
                    String geoms = eqLandslideRepository.getByGeom3(geom, sumGeom, number1, number2);
                    if (PlatformObjectUtils.isEmpty(geoms)) {
                        // throw new RuntimeException("未获取到道路数据的空间数据，无法进行计算！");
                        return RestResponse.fail("未获取到道路数据的空间数据，无法进行计算！");
                    }
                    if (geoms.indexOf("MULTIPOLYGON") != 0) {
                        if (geoms.indexOf("POLYGON") == 0) {
                            geoms = geoms.replace("POLYGON", "MULTIPOLYGON(") + ")";
                        } else {
                            continue;
                        }
                    }
                    DzzhCountBuffer dzzhCountBuffer = new DzzhCountBuffer();
                    dzzhCountBuffer.setId(UUIDGenerator.getUUID());
                    dzzhCountBuffer.setValues1(param3);
                    dzzhCountBuffer.setGeomText(geoms);
                    dzzhCountBuffer.setTaskId(taskId);
                    eqLandslideRepository.saveDzzhCountBuffer1(dzzhCountBuffer,"dzzh_count_buffer1_dl");
                }
                //刷新指定超图工作空间数据源数据集
                boolean b3 = hypergraphService.getWorkspaceReload("dzzh_count_buffer1_dl");
                RestResponse serverToken6 = vectorTif("vectorTif-1", "dzzh_count_buffer1_dl", clipLayerName,
                        "values1", "task_id='" + taskId + "'"
                        , "task_num='" + taskEntity.getTaskNum() + "' and del_flag='0'",currTime);
                if (serverToken6.getCode() != 200) {
                    // throw new RuntimeException("该区域没有道路数据！");
                    return RestResponse.fail("该区域没有道路数据");
                }
                String resultLayerName6 = JSON.parseObject(JSON.toJSONString(serverToken6.getData()))
                        .getString("resultLayerName");
                // String resultLayerName6="dzzh_count_buffer_矢量转栅格_1698283945977";
                jsonObject6.put("resultLayerName6", resultLayerName6);

                Map<String, Object> map6_1 = new HashMap<>();
                map6_1.put("type", "maxMinTif");
                map6_1.put("layerName", resultLayerName6);//矢量转栅格的数据集
                RestResponse serverToken6_1 = hypergraphService.getServerToken(map6_1);
                if (serverToken6_1.getCode() != 200) {
                    // throw new RuntimeException("该区域没有道路数据！");
                    return RestResponse.fail("该区域没有道路数据！！！");
                }
                BigDecimal min6 = JSON.parseObject(JSON.toJSONString(serverToken6_1.getData())).getBigDecimal("min");
                BigDecimal max6 = JSON.parseObject(JSON.toJSONString(serverToken6_1.getData())).getBigDecimal("max");
                jsonObject6.put("min6", min6);
                jsonObject6.put("max6", max6);
            }
            //地震动峰值加速度
            Map<String, JSONObject> mapMap7 = new HashMap<>();
            if (factor2.contains("3")) {
                //地震动峰值加速度-2%
                if (PlatformObjectUtils.isNotEmpty(entity.getA1Version())){
                    RestResponse serverToken7 = vectorTif("vectorTif-2", "jc_pga_002", clipLayerName,
                            "year50_pro_exceedance002", "data_version='" + entity.getA1Version() + "' and del_flag='0'"
                            , "task_num='" + taskEntity.getTaskNum() + "' and del_flag='0'",currTime);
                    if (serverToken7.getCode() != 200) {
                        return RestResponse.fail("该区域没有" + entity.getA1Version() + "版本的地震峰值加速度数据");
                    }
                    JSONObject jsonObject7 = new JSONObject();
                    String resultLayerName7 = JSON.parseObject(JSON.toJSONString(serverToken7.getData()))
                            .getString("resultLayerName");
                    jsonObject7.put("resultLayerName", resultLayerName7);

                    Map<String, Object> map7_1 = new HashMap<>();
                    map7_1.put("type", "maxMinTif");
                    map7_1.put("layerName", resultLayerName7);//矢量转栅格的数据集
                    RestResponse serverToken7_1 = hypergraphService.getServerToken(map7_1);
                    if (serverToken7_1.getCode() != 200) {
                        return RestResponse.fail("该区域没有" + entity.getA1Version() + "版本的地震峰值加速度数据！！！");
                    }
                    jsonObject7.put("min", JSON.parseObject(JSON.toJSONString(serverToken7_1.getData())).getBigDecimal(
                            "min"));
                    jsonObject7.put("max", JSON.parseObject(JSON.toJSONString(serverToken7_1.getData())).getBigDecimal(
                            "max"));
                    mapMap7.put("1", jsonObject7);
                }

                //地震动峰值加速度-10%
                if (PlatformObjectUtils.isNotEmpty(entity.getA2Version())){
                    RestResponse serverToken8 = vectorTif("vectorTif-2", "jc_pga_010", clipLayerName,
                            "year50_pro_exceedance_010", "data_version='" + entity.getA2Version() + "' and del_flag='0'"
                            , "task_num='" + taskEntity.getTaskNum() + "' and del_flag='0'",currTime);
                    if (serverToken8.getCode() != 200) {
                        return RestResponse.fail("该区域没有" + entity.getA2Version() + "版本的地震峰值加速度数据");
                    }
                    JSONObject jsonObject8 = new JSONObject();
                    String resultLayerName8 = JSON.parseObject(JSON.toJSONString(serverToken8.getData()))
                            .getString("resultLayerName");
                    jsonObject8.put("resultLayerName", resultLayerName8);

                    Map<String, Object> map8_1 = new HashMap<>();
                    map8_1.put("type", "maxMinTif");
                    map8_1.put("layerName", resultLayerName8);//矢量转栅格的数据集
                    RestResponse serverToken8_1 = hypergraphService.getServerToken(map8_1);
                    if (serverToken8_1.getCode() != 200) {
                        return RestResponse.fail("该区域没有" + entity.getA2Version() + "版本的地震峰值加速度数据！！！");
                    }
                    jsonObject8.put("min", JSON.parseObject(JSON.toJSONString(serverToken8_1.getData())).getBigDecimal(
                            "min"));
                    jsonObject8.put("max", JSON.parseObject(JSON.toJSONString(serverToken8_1.getData())).getBigDecimal(
                            "max"));
                    mapMap7.put("2", jsonObject8);
                }

                //地震动峰值加速度-62%
                if (PlatformObjectUtils.isNotEmpty(entity.getA3Version())){
                    RestResponse serverToken9 = vectorTif("vectorTif-2", "jc_pga_063", clipLayerName,
                            "year50_pro_exceedance_063", "data_version='" + entity.getA3Version() + "' and del_flag='0'"
                            , "task_num='" + taskEntity.getTaskNum() + "' and del_flag='0'",currTime);
                    if (serverToken9.getCode() != 200) {
                        return RestResponse.fail("该区域没有" + entity.getA3Version() + "版本的地震峰值加速度数据");
                    }
                    JSONObject jsonObject9 = new JSONObject();
                    String resultLayerName9 = JSON.parseObject(JSON.toJSONString(serverToken9.getData()))
                            .getString("resultLayerName");
                    jsonObject9.put("resultLayerName", resultLayerName9);

                    Map<String, Object> map9_1 = new HashMap<>();
                    map9_1.put("type", "maxMinTif");
                    map9_1.put("layerName", resultLayerName9);//矢量转栅格的数据集
                    RestResponse serverToken9_1 = hypergraphService.getServerToken(map9_1);
                    if (serverToken9_1.getCode() != 200) {
                        return RestResponse.fail("该区域没有" + entity.getA3Version() + "版本的地震峰值加速度数据！！！");
                    }
                    jsonObject9.put("min", JSON.parseObject(JSON.toJSONString(serverToken9_1.getData())).getBigDecimal(
                            "min"));
                    jsonObject9.put("max", JSON.parseObject(JSON.toJSONString(serverToken9_1.getData())).getBigDecimal(
                            "max"));
                    mapMap7.put("3", jsonObject9);
                }
            }

            Map<String, Object> map = new HashMap<>();
            map.put("entity", entity);
            map.put("factorMap", factorMap);
            map.put("factor1", factor1);
            map.put("jsonObject2", jsonObject2);
            map.put("jsonObject3", jsonObject3);
            map.put("jsonObject4", jsonObject4);
            map.put("jsonObject5", jsonObject5);
            map.put("jsonObject6", jsonObject6);
            map.put("mapMap7", mapMap7);
            map.put("currTime",currTime);
            map.put("clipLayerName",clipLayerName);

            //计算
            restResponse = getDataBySlopeVersion1(geom, entity.getSlopeVersion(), taskId,
                    dtmId, "1", map);
            if (restResponse.getCode() != 200) {
                return restResponse;
            }
            DzzhTaskModel dzzhTaskModel2 = eqLandslideRepository.getDzzhTaskModelByDtmId(dtmId);
            if (PlatformObjectUtils.isEmpty(dzzhTaskModel2)) {
                saveDzzhTaskModel(taskId, dtmId, "1");
            } else {
                dzzhTaskModel2.setEvaluateStatus("2");
                dzzhTaskModel2.setResultStatus("2");
                dzzhTaskModel2.setUpdateUser(PlatformSessionUtils.getUserId());
                dzzhTaskModel2.setUpdateTime(PlatformDateUtils.getCurrentTimestamp());
                dzzhTaskModel2.setModelType("1");
                eqLandslideRepository.updateDzzhTaskModel1(dzzhTaskModel2);
            }

            JSONArray jsonArray = JSON.parseObject(restResponse.getData().toString()).getJSONArray("resultLayerName");
            for (int i = 0; i < jsonArray.size(); i++) {
                String fileName = jsonArray.getString(i);
                DzzhResultCcfx ccfx = new DzzhResultCcfx();
                ccfx.setId(UUIDGenerator.getUUID());
                String s = fileName.split("_")[fileName.split("_").length - 1];
                ccfx.setProExceed(s);
               *//* if ("1".equals(s)) {
                    ccfx.setProExceed("50年超越概率2%");
                } else if ("2".equals(s)) {
                    ccfx.setProExceed("50年超越概率10%");
                } else {
                    ccfx.setProExceed("50年超越概率63%");
                }*//*
                ccfx.setFilePath(fileName);
                ccfx.setTaskId(taskId);
                ccfx.setModelId(modelId);
                eqLandslideRepository.saveDzzhResultCcfx(ccfx);
            }

            restResponse.setMessage("计算成功！");
            return restResponse;
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (restResponse != null) {
                if (restResponse.getCode() == 200) {
                    delOldAndSaveNew(dzzhTaskModel, taskId);
                    redisTemplate.delete(taskId);
                }else {
                    //事务手动回滚
                    TransactionAspectSupport.currentTransactionStatus().setRollbackOnly();
                }
            }

        }
        return RestResponse.fail("计算失败！");
    }*/

    /**
     * Runs the AHP (analytic hierarchy process) raster pipeline for the landslide
     * susceptibility model: builds one weighted raster per selected factor (slope,
     * lithology, fault, drainage, climate, road), then for every ground-motion
     * layer in {@code mapMap7} sums the factor rasters into a final raster and
     * reclassifies it with natural breaks.
     *
     * @param resultLayerName1 name of the clipped slope raster in the
     *                         "dzzhTifCount" datasource
     * @param map              context assembled by the caller: "entity"
     *                         (DzzhModelCcfx), "factorMap" (AHP weights b1..b4,
     *                         c1..c3), per-factor JSONObjects "jsonObject2".."jsonObject6"
     *                         and "mapMap7" (ground-motion layers keyed by probability level)
     * @param currTime         timestamp string used to build unique layer names
     * @return succeed with {"resultLayerName": [reclassified layer names]}, or the
     *         first non-200 RestResponse returned by the raster services
     */
    private RestResponse count1Detail(String resultLayerName1, Map<String, Object> map, String currTime) {
        JSONArray jsonArray2 = new JSONArray();   // collects the final reclassified layer names
        JSONObject jsonObject = new JSONObject(); // response payload
        // Unpack the context the caller assembled (AHP weights + per-factor layer metadata).
        Map<String, BigDecimal> factorMap = (Map<String, BigDecimal>) map.get("factorMap");
        DzzhModelCcfx entity = (DzzhModelCcfx) map.get("entity");
        // NOTE(review): the visible caller never puts a "jsonObject1" key into this map,
        // so jsonObject1 appears to always be null here — confirm against other callers.
        JSONObject jsonObject1 = (JSONObject) map.get("jsonObject1");
        JSONObject jsonObject2 = (JSONObject) map.get("jsonObject2");
        JSONObject jsonObject3 = (JSONObject) map.get("jsonObject3");
        JSONObject jsonObject4 = (JSONObject) map.get("jsonObject4");
        JSONObject jsonObject5 = (JSONObject) map.get("jsonObject5");
        JSONObject jsonObject6 = (JSONObject) map.get("jsonObject6");
        Map<String, JSONObject> mapMap7 = (Map<String, JSONObject>) map.get("mapMap7");
        // Per-factor intermediate layer names; "" means the factor was not computed.
        String resultLayerName_1 = "";
        String resultLayerName_2 = "";
        String resultLayerName_3 = "";
        String resultLayerName_4 = "";
        String resultLayerName_5 = "";
        String resultLayerName_6 = "";

        String countString = "";
        if (jsonObject1 == null) {
            // No slope metadata: degenerate expression (zero raster over the clipped extent).
            countString = "Con([dzzhTifCount." + resultLayerName1 + "]==-9999,0)";
        } else {
            // Slope: bucket the slope raster into graded scores and apply weight b1.
            countString = "(Con([dzzhTifCount." + resultLayerName1 + "]==-9999,0," +
                    "Con([dzzhTifCount." + resultLayerName1 + "]<10,0,Con([dzzhTifCount." + resultLayerName1 + "]<25,0.11," +
                    "Con([dzzhTifCount." + resultLayerName1 + "]<40,0.33,Con([dzzhTifCount." + resultLayerName1 + "]<60,0.67,1" +
                    ")))))" + ")*" + factorMap.get("b1");
            // Raster-calculator call for the weighted slope layer.
            resultLayerName_1 = "temp_dzzh层次分析法模型计算_坡度_" + currTime + "_" + 1;// slope
            RestResponse restResponse1 = countTif("countTif2", countString, resultLayerName_1);
            if (restResponse1.getCode() != 200) {
                return restResponse1;
            }
            // Copy the result into the server workspace.
            RestResponse restResponse11 = coypTif1("coypTif1", resultLayerName_1, resultLayerName_1);
            if (restResponse11.getCode() != 200) {
                return restResponse11;
            }
        }

        // Lithology (factor code "2").
        if (entity.getFactor1().contains("2")) {
            String resultLayerName2 = jsonObject2.getString("resultLayerName2");
            if (PlatformObjectUtils.isNotEmpty(resultLayerName2)) {
                countString = "([dzzhTifCount." + resultLayerName2 + "])*" + factorMap.get("b2");
            }
            // NOTE(review): if resultLayerName2 is empty, countString still holds the
            // previous factor's expression and is re-run under the lithology name — confirm.
            resultLayerName_2 = "temp_dzzh层次分析法模型计算_地层岩性_" + currTime + "_" + 1;// lithology
            RestResponse restResponse2 = countTif("countTif2", countString, resultLayerName_2);
            if (restResponse2.getCode() != 200) {
                return restResponse2;
            }
            // Copy the result into the server workspace.
            RestResponse restResponse22 = coypTif1("coypTif1", resultLayerName_2, resultLayerName_2);
            if (restResponse22.getCode() != 200) {
                return restResponse22;
            }
        }

        // Fault (factor code "3").
        if (entity.getFactor1().contains("3")) {
            String resultLayerName3 = jsonObject3.getString("resultLayerName3");
            if (PlatformObjectUtils.isNotEmpty(resultLayerName3)) {
                countString = "([dzzhTifCount." + resultLayerName3 + "])*" + factorMap.get("b3");
            }
            // Raster-calculator call for the weighted fault layer.
            resultLayerName_3 = "temp_dzzh层次分析法模型计算_断层_" + currTime + "_" + 1;// fault
            RestResponse restResponse3 = countTif("countTif2", countString, resultLayerName_3);
            if (restResponse3.getCode() != 200) {
                return restResponse3;
            }
            // Copy the result into the server workspace.
            RestResponse restResponse33 = coypTif1("coypTif1", resultLayerName_3, resultLayerName_3);
            if (restResponse33.getCode() != 200) {
                return restResponse33;
            }
        }

        // Drainage / water system (factor code "4").
        if (entity.getFactor1().contains("4")) {
            String resultLayerName4 = jsonObject4.getString("resultLayerName4");
            if (PlatformObjectUtils.isNotEmpty(resultLayerName4)) {
                countString = "([dzzhTifCount." + resultLayerName4 + "])*" + factorMap.get("b4");
            }
            // Raster-calculator call for the weighted drainage layer.
            resultLayerName_4 = "temp_dzzh层次分析法模型计算_水系_" + currTime + "_" + 1;// drainage
            RestResponse restResponse4 = countTif("countTif2", countString, resultLayerName_4);
            if (restResponse4.getCode() != 200) {
                return restResponse4;
            }
            // Copy the result into the server workspace.
            RestResponse restResponse44 = coypTif1("coypTif1", resultLayerName_4, resultLayerName_4);
            if (restResponse44.getCode() != 200) {
                return restResponse44;
            }
        }

        // Climate (inducing-factor code "1").
        if (entity.getFactor2().contains("1")) {
            String resultLayerName5 = jsonObject5.getString("resultLayerName5");
            if (PlatformObjectUtils.isNotEmpty(resultLayerName5)) {
                countString = "([dzzhTifCount." + resultLayerName5 + "])*" + factorMap.get("c1");
            }
            // Raster-calculator call for the weighted climate layer.
            resultLayerName_5 = "temp_dzzh层次分析法模型计算_气候_" + currTime + "_" + 1;// climate
            RestResponse restResponse5 = countTif("countTif2", countString, resultLayerName_5);
            if (restResponse5.getCode() != 200) {
                return restResponse5;
            }
            // Copy the result into the server workspace.
            RestResponse restResponse55 = coypTif1("coypTif1", resultLayerName_5, resultLayerName_5);
            if (restResponse55.getCode() != 200) {
                return restResponse55;
            }
        }

        // Road (inducing-factor code "2").
        if (entity.getFactor2().contains("2")) {
            String resultLayerName6 = jsonObject6.getString("resultLayerName6");
            if (PlatformObjectUtils.isNotEmpty(resultLayerName6)) {
                // NOTE(review): unlike the other factors this branch APPENDS to the previous
                // expression instead of replacing it — confirm whether that is intended.
                countString = countString + "+([dzzhTifCount." + resultLayerName6 + "])*" + factorMap.get("c2");
            }
            // Raster-calculator call for the weighted road layer.
            resultLayerName_6 = "temp_dzzh层次分析法模型计算_道路_" + currTime + "_" + 1;// road
            RestResponse restResponse6 = countTif("countTif2", countString, resultLayerName_6);
            if (restResponse6.getCode() != 200) {
                return restResponse6;
            }
            // Copy the result into the server workspace.
            RestResponse restResponse66 = coypTif1("coypTif1", resultLayerName_6, resultLayerName_6);
            if (restResponse66.getCode() != 200) {
                return restResponse66;
            }
        }

        // Final combination: one pass per ground-motion (exceedance-probability) layer.
        for (Map.Entry<String, JSONObject> a : mapMap7.entrySet()) {
            JSONObject value = a.getValue();
            String resultLayerName6 = value.getString("resultLayerName");
            String countString6 = "";
            if (PlatformObjectUtils.isNotEmpty(resultLayerName6)) {
                // Bucket ground-motion values into graded scores and apply weight c3.
                countString = "(Con([dzzhTifCount." + resultLayerName6 + "]>=0.75,1," +
                        "Con([dzzhTifCount." + resultLayerName6 + "]>=0.38,0.67,Con([dzzhTifCount." + resultLayerName6 + "]>=0.19,0.33," +
                        "Con([dzzhTifCount." + resultLayerName6 + "]>=0.09,0.11,Con([dzzhTifCount." + resultLayerName6 + "]>=0.04,0," +
                        "0)))))" + ")*" + factorMap.get("c3");
            }
            if (PlatformObjectUtils.isEmpty(countString)) {
                // NOTE(review): countString6 is assigned here but countTif below is passed
                // countString, and countString6 is reset to "" right after this block —
                // this assignment looks dead / possibly a bug; confirm intent.
                countString6 = "Con([dzzhTifCount." + resultLayerName1 + "]==-9999,0,0)";
            }
            // Ground motion — raster-calculator call.
            String resultLayerName_7 = "temp_dzzh层次分析法模型计算_地震动_" + currTime + "_t" + a.getKey();// ground motion
            RestResponse restResponse7 = countTif("countTif2", countString, resultLayerName_7);
            if (restResponse7.getCode() != 200) {
                return restResponse7;
            }
            // Copy the result into the server workspace.
            RestResponse restResponse77 = coypTif1("coypTif1", resultLayerName_7, resultLayerName_7);
            if (restResponse77.getCode() != 200) {
                return restResponse77;
            }

            // Final calculation:
            // sum every intermediate factor raster that was actually produced.
            countString6 = "";
            String countString61 = "";
            String countString62 = "";
            String countString63 = "";
            String countString64 = "";
            String countString65 = "";
            String countString66 = "";
            if (PlatformObjectUtils.isNotEmpty(resultLayerName_1)) {
                countString61 = "+[testdzzhTifCount." + resultLayerName_1 + "]";
            }
            if (PlatformObjectUtils.isNotEmpty(resultLayerName_2)) {
                countString62 = "+[testdzzhTifCount." + resultLayerName_2 + "]";
            }
            if (PlatformObjectUtils.isNotEmpty(resultLayerName_3)) {
                countString63 = "+[testdzzhTifCount." + resultLayerName_3 + "]";
            }
            if (PlatformObjectUtils.isNotEmpty(resultLayerName_4)) {
                countString64 = "+[testdzzhTifCount." + resultLayerName_4 + "]";
            }
            if (PlatformObjectUtils.isNotEmpty(resultLayerName_5)) {
                countString65 = "+[testdzzhTifCount." + resultLayerName_5 + "]";
            }
            if (PlatformObjectUtils.isNotEmpty(resultLayerName_6)) {
                countString66 = "+[testdzzhTifCount." + resultLayerName_6 + "]";
            }
            // Ground-motion raster plus all selected factor rasters.
            countString6 = "[testdzzhTifCount." + resultLayerName_7 + "]" + countString61 + countString62 +
                    countString63 + countString64 + countString65 + countString66;
            String resultLayerName_s = "层次分析法模型_最终计算_" + currTime + "_a" + a.getKey();// final result dataset for this probability level
            RestResponse restResponse8 = countTif("countTif3", countString6, resultLayerName_s);
            if (restResponse8.getCode() != 200) {
                return restResponse8;
            }
            // Copy the final raster into the dzzh dataset.
            RestResponse restResponse88 = coypTif1("coypTif2", resultLayerName_s, resultLayerName_s);
            if (restResponse88.getCode() != 200) {
                return restResponse88;
            }

            // Natural-breaks reclassification of the final raster.
            Map<String, Object> map3 = new HashMap<>();
            map3.put("type", "reclassify");
            map3.put("layerName", resultLayerName_s);
            map3.put("flag", "");
            RestResponse serverToken = hypergraphService.getServerToken(map3);
            // NOTE(review): a non-200 reclassify response is silently skipped here (the
            // layer is simply not added) rather than propagated — confirm intended.
            if (serverToken.getCode() == 200) {
                JSONObject finJsonObject = (JSONObject) serverToken.getData();
                jsonArray2.add(finJsonObject.getString("layerName"));
            }
        }
        jsonObject.put("resultLayerName", jsonArray2);
        return RestResponse.succeed(jsonObject);
    }

   /* private JSONObject oldCount1Detail(String resultLayerName1, Map<String, Object> map, String currTime) {
        JSONArray jsonArray2 = new JSONArray();
        JSONObject jsonObject = new JSONObject();
        Map<String, BigDecimal> factorMap = (Map<String, BigDecimal>) map.get("factorMap");
        DzzhModelCcfx entity = (DzzhModelCcfx) map.get("entity");
        JSONObject jsonObject1 = (JSONObject) map.get("jsonObject1");
        JSONObject jsonObject2 = (JSONObject) map.get("jsonObject2");
        JSONObject jsonObject3 = (JSONObject) map.get("jsonObject3");
        JSONObject jsonObject4 = (JSONObject) map.get("jsonObject4");
        JSONObject jsonObject5 = (JSONObject) map.get("jsonObject5");
        JSONObject jsonObject6 = (JSONObject) map.get("jsonObject6");
        Map<String, JSONObject> mapMap7 = (Map<String, JSONObject>) map.get("mapMap7");

        String countString = "";
        if (jsonObject1 == null) {
            countString = "Con([dzzhTifCount." + resultLayerName1 + "]==-9999,0)";
        } else {
            //坡度
            BigDecimal min1 = jsonObject1.getBigDecimal("min1");
            BigDecimal max1 = jsonObject1.getBigDecimal("max1");
            //获取赋值后的最大最小值
            //Map<String,BigDecimal> maxMinMap = getMaxMin(max1,min1,"C1");
            min1 = new BigDecimal("1");
            max1 = new BigDecimal("3");
            if (max1.compareTo(min1) != 0) {
                BigDecimal subtract = max1.subtract(min1);
                countString = "((Con([dzzhTifCount." + resultLayerName1 + "]==-9999,0," +
                        "Con([dzzhTifCount." + resultLayerName1 + "]<10,1,Con([dzzhTifCount." + resultLayerName1 + "]<25,2," +
                        "Con([dzzhTifCount." + resultLayerName1 + "]<40,4,Con([dzzhTifCount." + resultLayerName1 + "]<60,7,10" +
                        ")))))-" + min1 + ")/" + subtract + ")*" + factorMap.get("b1");
            }
        }
        //地层岩性
        if (entity.getFactor1().contains("2")) {
            String resultLayerName2 = jsonObject2.getString("resultLayerName2");
            if (PlatformObjectUtils.isNotEmpty(resultLayerName2)) {
                BigDecimal min2 = jsonObject2.getBigDecimal("min2");
                BigDecimal max2 = jsonObject2.getBigDecimal("max2");
                if (max2.compareTo(min2) != 0) {
                    BigDecimal subtract = max2.subtract(min2);
                    countString = countString + "+(([dzzhTifCount." + resultLayerName2 + "]-" + min2 + ")/" + subtract + ")*" + factorMap.get("b2");
                }
            }
        }
        //断层
        if (entity.getFactor1().contains("3")) {
            String resultLayerName3 = jsonObject3.getString("resultLayerName3");
            if (PlatformObjectUtils.isNotEmpty(resultLayerName3)) {
                BigDecimal min3 = jsonObject3.getBigDecimal("min3");
                BigDecimal max3 = jsonObject3.getBigDecimal("max3");
                if (max3.compareTo(min3) != 0) {
                    BigDecimal subtract = max3.subtract(min3);
                    countString = countString + "+(([dzzhTifCount." + resultLayerName3 + "]-" + min3 + ")/" + subtract + ")*" + factorMap.get("b3");
                }
            }
        }
        //水系
        if (entity.getFactor1().contains("4")) {
            String resultLayerName4 = jsonObject4.getString("resultLayerName4");
            if (PlatformObjectUtils.isNotEmpty(resultLayerName4)) {
                BigDecimal min4 = jsonObject4.getBigDecimal("min4");
                BigDecimal max4 = jsonObject4.getBigDecimal("max4");
                if (max4.compareTo(min4) != 0) {
                    BigDecimal subtract = max4.subtract(min4);
                    countString = countString + "+(([dzzhTifCount." + resultLayerName4 + "]-" + min4 + ")/" + subtract + ")*" + factorMap.get("b4");
                }
            }
        }
        //气候
        if (entity.getFactor2().contains("1")) {
            String resultLayerName5 = jsonObject5.getString("resultLayerName5");
            if (PlatformObjectUtils.isNotEmpty(resultLayerName5)) {
                BigDecimal min5 = jsonObject5.getBigDecimal("min5");
                BigDecimal max5 = jsonObject5.getBigDecimal("max5");
                if (max5.compareTo(min5) != 0) {
                    BigDecimal subtract = max5.subtract(min5);
                    countString = countString + "+(([dzzhTifCount." + resultLayerName5 + "]-" + min5 + ")/" + subtract + ")*" + factorMap.get(
                            "c1");
                }
            }
        }
        //道路
        if (entity.getFactor2().contains("2")) {
            String resultLayerName6 = jsonObject6.getString("resultLayerName6");
            if (PlatformObjectUtils.isNotEmpty(resultLayerName6)) {
                BigDecimal min6 = jsonObject6.getBigDecimal("min6");
                BigDecimal max6 = jsonObject6.getBigDecimal("max6");
                if (max6.compareTo(min6) != 0) {
                    BigDecimal subtract = max6.subtract(min6);
                    countString = countString + "+(([dzzhTifCount." + resultLayerName6 + "]-" + min6 + ")/" + subtract + ")*" + factorMap.get(
                            "c2");
                }
            }
        }
        //地震动
        for (Map.Entry<String, JSONObject> a : mapMap7.entrySet()) {
            JSONObject value = a.getValue();
            String resultLayerName6 = value.getString("resultLayerName");
            BigDecimal min = value.getBigDecimal("min");
            BigDecimal max = value.getBigDecimal("max");
            //获取赋值后的最大最小值
            Map<String,BigDecimal> maxMinMap = getMaxMin(max,min,"C7");
            min = maxMinMap.get("min");
            max = maxMinMap.get("max");
            String countString6 = countString;
            if (PlatformObjectUtils.isNotEmpty(resultLayerName6)) {
                if (max.compareTo(min) != 0) {
                    BigDecimal subtract = max.subtract(min);
                    countString6 = countString6 + "+((Con([dzzhTifCount." + resultLayerName6 + "]>=0.75,10," +
                            "Con([dzzhTifCount." + resultLayerName6 + "]>=0.38,7,Con([dzzhTifCount." + resultLayerName6 + "]>=0.19,4," +
                            "Con([dzzhTifCount." + resultLayerName6 + "]>=0.09,2,Con([dzzhTifCount." + resultLayerName6 + "]>=0.04,1," +
                            "0)))))-" + min + ")/" + subtract + ")*" + factorMap.get("c3");
                }
            }
            if (PlatformObjectUtils.isEmpty(countString)) {
                countString6 = countString6.substring(1);
            } else if (PlatformObjectUtils.isEmpty(countString6)) {
                countString6 = "Con([dzzhTifCount." + resultLayerName1 + "]==-9999,0,0)";
            }
            //栅格计算
            Map<String, Object> map2 = new HashMap<>();
            map2.put("type", "countMapsTif");
            String resultLayerName = "层次分析法模型_最终计算_" + currTime + "_a" + a.getKey();//裁剪计算结果数据集
            map2.put("layerName", resultLayerName);
            map2.put("mapsName", "层次模板-图例.xml");
            map2.put("countString", countString6);
            hypergraphService.getServerToken(map2);
            jsonArray2.add(resultLayerName);
        }
        jsonObject.put("resultLayerName", jsonArray2);
        return jsonObject;
    }*/

    /**
     * Maps a raw [min, max] range of a factor layer onto the model's graded
     * scores for the given factor type.
     *
     * <p>Fixes: raw {@code Map} return type (unchecked at call sites) is now
     * parameterized, the literal-first {@code equals} avoids an NPE on a null
     * {@code type}, and the mutually exclusive branches are chained.
     *
     * @param max  raw maximum value of the factor layer
     * @param min  raw minimum value of the factor layer
     * @param type factor code: "C1" = slope, "C7" = ground motion
     * @return map with keys "max"/"min" holding the graded scores; empty map for
     *         any other type (matching the original behavior)
     */
    private Map<String, BigDecimal> getMaxMin(BigDecimal max, BigDecimal min, String type) {
        Map<String, BigDecimal> map = new HashMap<>();
        if ("C1".equals(type)) {
            // Slope grading.
            map.put("max", getC1Value(max));
            map.put("min", getC1Value(min));
        } else if ("C7".equals(type)) {
            // Ground-motion grading.
            map.put("max", getC7Value(max));
            map.put("min", getC7Value(min));
        }
        return map;
    }

    /**
     * Slope grading: buckets a slope value into the model's 1–10 score scale.
     * Buckets: [0,10)→1, [10,25)→2, [25,40)→4, [40,60)→7, [60,∞)→10;
     * anything below 0 falls through to 0.
     */
    private BigDecimal getC1Value(BigDecimal num) {
        // Descending guard chain: the first threshold that num reaches wins.
        if (num.compareTo(new BigDecimal("60")) >= 0) {
            return new BigDecimal("10");
        }
        if (num.compareTo(new BigDecimal("40")) >= 0) {
            return new BigDecimal("7");
        }
        if (num.compareTo(new BigDecimal("25")) >= 0) {
            return new BigDecimal("4");
        }
        if (num.compareTo(new BigDecimal("10")) >= 0) {
            return new BigDecimal("2");
        }
        if (num.compareTo(new BigDecimal("0")) >= 0) {
            return new BigDecimal("1");
        }
        return new BigDecimal("0");
    }

    /**
     * Ground-motion grading: buckets a ground-motion value into the 1–10 score
     * scale. Buckets: [0.04,0.09)→1, [0.09,0.19)→2, [0.19,0.38)→4,
     * [0.38,0.75)→7, [0.75,∞)→10; below 0.04 falls through to 0.
     */
    private BigDecimal getC7Value(BigDecimal num) {
        // Descending guard chain: the first threshold that num reaches wins.
        if (num.compareTo(new BigDecimal("0.75")) >= 0) {
            return new BigDecimal("10");
        }
        if (num.compareTo(new BigDecimal("0.38")) >= 0) {
            return new BigDecimal("7");
        }
        if (num.compareTo(new BigDecimal("0.19")) >= 0) {
            return new BigDecimal("4");
        }
        if (num.compareTo(new BigDecimal("0.09")) >= 0) {
            return new BigDecimal("2");
        }
        if (num.compareTo(new BigDecimal("0.04")) >= 0) {
            return new BigDecimal("1");
        }
        return new BigDecimal("0");
    }


    /**
     * Persists a new AHP model record, assigning it a freshly generated id.
     *
     * @param entity model row to insert; its id field is overwritten here
     * @return the generated primary key
     */
    private String saveDzzhModelCcfx(DzzhModelCcfx entity) {
        String id = UUIDGenerator.getUUID();
        entity.setId(id);
        eqLandslideRepository.saveDzzhModelCcfx(entity);
        return id;
    }

    /**
     * Builds the AHP pairwise-comparison (judgment) matrices for the model from
     * the user-entered importance scores num1..num20.
     *
     * <p>Every matrix is reciprocal: cell (i,j) = numA/numB and cell (j,i) =
     * numB/numA, each computed at 4 decimal places rounded up (both directions
     * are divided from the raw inputs, exactly as the original code did, rather
     * than inverting one ratio). The deprecated {@code BigDecimal.ROUND_UP}
     * constant is replaced by {@code RoundingMode.UP} (identical semantics), and
     * the repeated matrix literals are factored into helpers.
     *
     * @param entity model row carrying the selected factor codes (factor1 =
     *               B-level, factor2 = C-level) and the comparison scores
     * @return map keyed by the weight names the matrix covers (e.g. "b1,b2,b3")
     */
    private Map<String, BigDecimal[][]> getBigDecimals(DzzhModelCcfx entity) {
        Map<String, BigDecimal[][]> map = new HashMap<>();
        // A level: num1/num2 compare the two criterion groups against each other.
        map.put("a1,a2", ahpPair(entity.getNum1(), entity.getNum2()));

        // B level: codes 1..4 — presumably slope/lithology/fault/drainage
        // (matches the factor order used elsewhere in this class; confirm).
        String factor1 = entity.getFactor1();
        String[] split1 = factor1.split(",");
        if (split1.length == 1) {
            // Single factor: trivial 1x1 matrix.
            map.put("b", new BigDecimal[][]{{BigDecimal.ONE}});
        } else if (split1.length == 2) {
            if (factor1.contains("1,2") || factor1.contains("2,1")) {
                map.put("b1,b2", ahpPair(entity.getNum3(), entity.getNum4()));
            } else if (factor1.contains("1,3") || factor1.contains("3,1")) {
                map.put("b1,b3", ahpPair(entity.getNum5(), entity.getNum6()));
            } else if (factor1.contains("1,4") || factor1.contains("4,1")) {
                map.put("b1,b4", ahpPair(entity.getNum7(), entity.getNum8()));
            } else if (factor1.contains("2,3") || factor1.contains("3,2")) {
                map.put("b2,b3", ahpPair(entity.getNum9(), entity.getNum10()));
            } else if (factor1.contains("2,4") || factor1.contains("4,2")) {
                map.put("b2,b4", ahpPair(entity.getNum11(), entity.getNum12()));
            } else if (factor1.contains("3,4") || factor1.contains("4,3")) {
                map.put("b3,b4", ahpPair(entity.getNum13(), entity.getNum14()));
            }
        } else if (split1.length == 3) {
            if (factor1.contains("1") && factor1.contains("2") && factor1.contains("3")) {
                map.put("b1,b2,b3", ahpTriple(entity.getNum3(), entity.getNum4(),
                        entity.getNum5(), entity.getNum6(), entity.getNum9(), entity.getNum10()));
            } else if (factor1.contains("1") && factor1.contains("2") && factor1.contains("4")) {
                map.put("b1,b2,b4", ahpTriple(entity.getNum3(), entity.getNum4(),
                        entity.getNum7(), entity.getNum8(), entity.getNum11(), entity.getNum12()));
            } else if (factor1.contains("1") && factor1.contains("4") && factor1.contains("3")) {
                map.put("b1,b3,b4", ahpTriple(entity.getNum5(), entity.getNum6(),
                        entity.getNum7(), entity.getNum8(), entity.getNum13(), entity.getNum14()));
            } else if (factor1.contains("4") && factor1.contains("2") && factor1.contains("3")) {
                map.put("b2,b3,b4", ahpTriple(entity.getNum9(), entity.getNum10(),
                        entity.getNum11(), entity.getNum12(), entity.getNum13(), entity.getNum14()));
            }
        } else if (split1.length == 4) {
            map.put("b1,b2,b3,b4", ahpQuad(
                    entity.getNum3(), entity.getNum4(),    // b1 : b2
                    entity.getNum5(), entity.getNum6(),    // b1 : b3
                    entity.getNum7(), entity.getNum8(),    // b1 : b4
                    entity.getNum9(), entity.getNum10(),   // b2 : b3
                    entity.getNum11(), entity.getNum12(),  // b2 : b4
                    entity.getNum13(), entity.getNum14()));// b3 : b4
        }

        // C level: codes 1..3 — presumably climate/road/ground motion (confirm).
        String factor2 = entity.getFactor2();
        String[] split2 = factor2.split(",");
        if (split2.length == 1) {
            map.put("c", new BigDecimal[][]{{BigDecimal.ONE}});
        } else if (split2.length == 2) {
            if (factor2.contains("1,2") || factor2.contains("2,1")) {
                map.put("c1,c2", ahpPair(entity.getNum15(), entity.getNum16()));
            } else if (factor2.contains("1,3") || factor2.contains("3,1")) {
                map.put("c1,c3", ahpPair(entity.getNum17(), entity.getNum18()));
            } else if (factor2.contains("2,3") || factor2.contains("3,2")) {
                map.put("c2,c3", ahpPair(entity.getNum19(), entity.getNum20()));
            }
        } else if (split2.length == 3) {
            map.put("c1,c2,c3", ahpTriple(entity.getNum15(), entity.getNum16(),
                    entity.getNum17(), entity.getNum18(), entity.getNum19(), entity.getNum20()));
        }
        return map;
    }

    /** a/b at 4 decimal places, rounded up (same semantics as the old ROUND_UP). */
    private BigDecimal ahpRatio(BigDecimal a, BigDecimal b) {
        return a.divide(b, 4, java.math.RoundingMode.UP);
    }

    /** 2x2 reciprocal judgment matrix [[1, a/b], [b/a, 1]]. */
    private BigDecimal[][] ahpPair(BigDecimal a, BigDecimal b) {
        return new BigDecimal[][]{
                {BigDecimal.ONE, ahpRatio(a, b)},
                {ahpRatio(b, a), BigDecimal.ONE}};
    }

    /**
     * 3x3 reciprocal judgment matrix from three pairwise comparisons:
     * (a1,a2) = item1:item2, (b1,b2) = item1:item3, (c1,c2) = item2:item3.
     */
    private BigDecimal[][] ahpTriple(BigDecimal a1, BigDecimal a2, BigDecimal b1, BigDecimal b2,
                                     BigDecimal c1, BigDecimal c2) {
        return new BigDecimal[][]{
                {BigDecimal.ONE, ahpRatio(a1, a2), ahpRatio(b1, b2)},
                {ahpRatio(a2, a1), BigDecimal.ONE, ahpRatio(c1, c2)},
                {ahpRatio(b2, b1), ahpRatio(c2, c1), BigDecimal.ONE}};
    }

    /**
     * 4x4 reciprocal judgment matrix from six pairwise comparisons, supplied in
     * the order (1:2), (1:3), (1:4), (2:3), (2:4), (3:4).
     */
    private BigDecimal[][] ahpQuad(BigDecimal a1, BigDecimal a2, BigDecimal b1, BigDecimal b2,
                                   BigDecimal c1, BigDecimal c2, BigDecimal d1, BigDecimal d2,
                                   BigDecimal e1, BigDecimal e2, BigDecimal f1, BigDecimal f2) {
        return new BigDecimal[][]{
                {BigDecimal.ONE, ahpRatio(a1, a2), ahpRatio(b1, b2), ahpRatio(c1, c2)},
                {ahpRatio(a2, a1), BigDecimal.ONE, ahpRatio(d1, d2), ahpRatio(e1, e2)},
                {ahpRatio(b2, b1), ahpRatio(d2, d1), BigDecimal.ONE, ahpRatio(f1, f2)},
                {ahpRatio(c2, c1), ahpRatio(e2, e1), ahpRatio(f2, f1), BigDecimal.ONE}};
    }

    /**
     * Computes AHP (Analytic Hierarchy Process, sum/normalized-column method)
     * weights from a square pairwise-comparison matrix.
     *
     * <p>For each row i the score is b[i] = sum over columns j of
     * a[i][j] / columnSum(j); the published weight is b[i] / sum(b). For
     * matrices of order 3 or more a consistency ratio CR = CI / RI is
     * computed and reported under the {@code "status"} key
     * (ONE = consistent, CR &lt; 0.1; ZERO = inconsistent).
     *
     * <p>Fixes over the previous implementation:
     * <ul>
     *   <li>the hand-unrolled 4x4 branch used copy-pasted, shifted column
     *       sums (e.g. {@code [3][0]} where {@code [3][3]} was required);</li>
     *   <li>the generic (n &gt;= 5) branch had an off-by-one in its
     *       row/column bookkeeping and eventually indexed past the matrix
     *       (ArrayIndexOutOfBoundsException); it also divided the weights
     *       by n, inconsistent with the 2/3/4 branches.</li>
     * </ul>
     * Rounding modes and scales of the original working branches are
     * preserved (scale 12; HALF_UP for weight math, HALF_DOWN for CI/CR).
     *
     * @param key         comma-separated result keys, one per matrix row
     * @param bigDecimals n x n pairwise-comparison matrix, all entries
     *                    positive (a zero column sum or zero weight raises
     *                    ArithmeticException, as before)
     * @return map of key -&gt; normalized weight, plus "status" for n &gt;= 3
     *         when an RI value is tabulated for n
     */
    private Map<String, BigDecimal> getFactor(String key, BigDecimal[][] bigDecimals) {
        Map<String, BigDecimal> map = new HashMap<>();
        int n = bigDecimals.length;
        // A single factor trivially carries the whole weight.
        if (n == 1) {
            map.put(key, BigDecimal.ONE);
            return map;
        }
        // Column sums of the comparison matrix.
        BigDecimal[] colSum = new BigDecimal[n];
        for (int j = 0; j < n; j++) {
            BigDecimal s = BigDecimal.ZERO;
            for (int i = 0; i < n; i++) {
                s = s.add(bigDecimals[i][j]);
            }
            colSum[j] = s;
        }
        // Row score b[i]: sum of the column-normalized entries of row i.
        BigDecimal[] b = new BigDecimal[n];
        BigDecimal sum = BigDecimal.ZERO;
        for (int i = 0; i < n; i++) {
            BigDecimal rowScore = BigDecimal.ZERO;
            for (int j = 0; j < n; j++) {
                rowScore = rowScore.add(bigDecimals[i][j].divide(colSum[j], 12, RoundingMode.HALF_UP));
            }
            b[i] = rowScore;
            sum = sum.add(rowScore);
        }
        // Normalized weights, one per comma-separated key.
        String[] split = key.split(",");
        for (int i = 0; i < n; i++) {
            map.put(split[i], b[i].divide(sum, 12, RoundingMode.HALF_UP));
        }
        // Consistency check (only meaningful for n >= 3; a 2x2 matrix is
        // always consistent, matching the original which set no status there).
        if (n >= 3) {
            BigDecimal nBd = BigDecimal.valueOf(n);
            // c[i] = weight / n — the vector the original branches used for
            // the lambda_max estimate.
            BigDecimal[] c = new BigDecimal[n];
            for (int i = 0; i < n; i++) {
                c[i] = b[i].divide(sum, 12, RoundingMode.HALF_UP).divide(nBd, 12, RoundingMode.HALF_UP);
            }
            // lambda_max = average over rows of (A*c)[i] / c[i].
            BigDecimal lamdaSum = BigDecimal.ZERO;
            for (int i = 0; i < n; i++) {
                BigDecimal aw = BigDecimal.ZERO;
                for (int j = 0; j < n; j++) {
                    aw = aw.add(bigDecimals[i][j].multiply(c[j]));
                }
                lamdaSum = lamdaSum.add(aw.divide(c[i], 12, RoundingMode.HALF_UP));
            }
            BigDecimal lamda = lamdaSum.divide(nBd, 12, RoundingMode.HALF_UP);
            BigDecimal ci = lamda.subtract(nBd).divide(nBd.subtract(BigDecimal.ONE), 12, RoundingMode.HALF_DOWN);
            BigDecimal ri = getRi(n);
            // getRi returns ZERO outside its 3..9 table; skip the check then
            // instead of dividing by zero.
            if (ri.signum() > 0) {
                BigDecimal cr = ci.divide(ri, 12, RoundingMode.HALF_DOWN);
                // "status" ONE = passed (CR < 0.1), ZERO = failed.
                map.put("status", cr.compareTo(BigDecimal.valueOf(0.1)) < 0 ? BigDecimal.ONE : BigDecimal.ZERO);
            }
        }
        return map;
    }

    /**
     * Random consistency index (RI) lookup for AHP matrices of order 3..9
     * (Saaty's tabulated values).
     *
     * @param num matrix order
     * @return the RI value, or {@link BigDecimal#ZERO} for orders outside
     *         the table (callers must treat zero as "no check possible")
     */
    private BigDecimal getRi(int num) {
        // Switch on the int directly — the previous String.valueOf round-trip
        // was needless.
        switch (num) {
            case 3:
                return BigDecimal.valueOf(0.58);
            case 4:
                return BigDecimal.valueOf(0.90);
            case 5:
                return BigDecimal.valueOf(1.12);
            case 6:
                return BigDecimal.valueOf(1.24);
            case 7:
                return BigDecimal.valueOf(1.32);
            case 8:
                return BigDecimal.valueOf(1.41);
            case 9:
                return BigDecimal.valueOf(1.52);
            default:
                return BigDecimal.ZERO;
        }
    }

    /**
     * Maps a grade key ("1".."5") onto its fixed score; any other key
     * scores zero.
     *
     * @param key grade key as a string
     * @return the score for the grade, ZERO when the key is unrecognized
     */
    private BigDecimal getParam3(String key) {
        switch (key) {
            case "1":
                return BigDecimal.valueOf(1);
            case "2":
                return BigDecimal.valueOf(0.67);
            case "3":
                return BigDecimal.valueOf(0.33);
            case "4":
                return BigDecimal.valueOf(0.11);
            case "5":
                return BigDecimal.valueOf(0);
            default:
                return BigDecimal.ZERO;
        }
    }

    /**
     * Loess Plateau model (model type "2"): earthquake-induced landslide
     * susceptibility computation for one task.
     *
     * <p>Workflow: take a per-task Redis lock, persist the model parameters,
     * rasterize the task boundary, then rasterize each factor layer
     * (lithology, landform, climate/rainfall, natural density, and the
     * optional 2%/10%/63% PGA layers) clipped to the task area, run the
     * slope-version pipeline, and record one result row per produced layer.
     *
     * <p>Fix: the "no climate geometry" error message previously embedded
     * {@code s2.toString()} on a {@code String[]}, which prints the array's
     * identity hash; it now joins the district parts.
     *
     * @param entity model parameters; {@code taskId} selects the task
     * @return success response with the produced layer names, or a failure
     *         response describing the missing data / step that failed
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public RestResponse count4(DzzhModelLoessPlateau entity) {
        RestResponse restResponse = null;
        String taskId = entity.getTaskId();
        DzzhTaskModel dzzhTaskModel = eqLandslideRepository.getDzzhTaskModelByTaskIdAndResultStatus(taskId, "2");
        try {
            // Cached mapping of administrative-division id -> area code.
            Map<String, String> areaIdCodeMap = (Map<String, String>) JSON.parse((String) redisTemplate.opsForValue().get(FxfzConstants.CACHE_SYS_KEY + "areaIdCodeMap"));

            TaskEntity taskEntity = taskService.viewTask(taskId);
            String geom = taskEntity.getGeom();
            // Poor-man's distributed lock: a live key means another thread is
            // already computing this task.
            // NOTE(review): the "{}" below is a leftover SLF4J-style
            // placeholder concatenated into the message; left as-is to keep
            // the user-facing string unchanged.
            if (PlatformObjectUtils.isNotEmpty(redisTemplate.opsForValue().get(taskId))) {
                return RestResponse.fail("该线程中的该数据:{}" + taskEntity.getTaskName() + "正在被计算！");
            }
            // Lock with a 10s TTL; failure paths below rely on the TTL since
            // only the success path deletes the key in finally.
            redisTemplate.opsForValue().set(taskId, "2", 10L, TimeUnit.SECONDS);
            String dtmId = UUIDGenerator.getUUID();
            entity.setDtmId(dtmId + ",");
            String modelId = saveDzzhModelLoessPlateau(entity);

            // Rasterize the task boundary vector (dzzh_task) into dzzhTifCount.
            String currTime = System.currentTimeMillis() + "";
            String clipLayerName = "temp_dzzh任务_" + currTime;
            String sqlFilter = "dt_id='" + taskId + "'";
            Map<String, Object> map0 = new HashMap<>();
            map0.put("type", "dzzhVectorQuery");
            map0.put("layerName", "dzzh_task");
            map0.put("resultLayerName", clipLayerName);
            map0.put("sqlFilter", sqlFilter);
            map0.put("queryType", "fxfzdzzh");
            RestResponse serverToken0 = hypergraphService.getServerToken(map0);
            if (serverToken0.getCode() != 200) {
                return RestResponse.fail(503, "计算失败！");
            }

            // Stratum lithology layer at the requested data version.
            RestResponse serverToken1 = vectorTif("vectorTif-2", "jc_stratum_lithology", clipLayerName,
                    "values1", "data_version='" + entity.getLithologyVersion() + "' and del_flag='0'"
                    , "task_num='" + taskEntity.getTaskNum() + "' and del_flag='0'", currTime);
            if (serverToken1.getCode() != 200) {
                return RestResponse.fail("该区域没有" + entity.getLithologyVersion() + "版本的地层岩性数据");
            }
            String resultLayerName1 = JSON.parseObject(JSON.toJSONString(serverToken1.getData()))
                    .getString("resultLayerName");
            System.out.println(entity.getLithologyVersion() + "版本的地层岩性:{}" + resultLayerName1 + "解析成功！");

            // Landform (geomorphology) layer.
            RestResponse serverToken2 = vectorTif("vectorTif-1", "dzzh_sys_landform", clipLayerName,
                    "values1", "data_version='" + entity.getPhyVersion() + "' and del_flag='0'"
                    , "task_num='" + taskEntity.getTaskNum() + "' and del_flag='0'", currTime);
            if (serverToken2.getCode() != 200) {
                return RestResponse.fail("该区域没有" + entity.getPhyVersion() + "版本的地貌数据");
            }
            String resultLayerName2 = JSON.parseObject(JSON.toJSONString(serverToken2.getData()))
                    .getString("resultLayerName");
            System.out.println(entity.getPhyVersion() + "版本的地貌:{}" + resultLayerName2 + "解析成功！");

            // Climate: rebuild per-district average-precipitation records.
            eqLandslideRepository.delDzzhJcClimate(taskId);
            String url = basicDataUrl + "/datacenter/jcClimate/getByArea?1=1";
            // NOTE(review): url is appended to inside the loop, so with
            // multiple districts earlier province/city/county params stay on
            // the request — TODO confirm this is intended by the data center.
            for (String s1 : taskEntity.getTaskDistrict().split(",")) {
                if (s1 != null) {
                    DzzhJcClimate dzzhJcClimate = new DzzhJcClimate();
                    // District string is "province[-city[-county]]".
                    String[] s2 = s1.split("-");
                    String divisionIds = null;
                    try {
                        if (s2.length == 1) {
                            url = url + "&province=" + s2[0] + "&city=&county=";
                            divisionIds = sysAreaService.getIdByCondition(URLEncoder.encode(s2[0], "UTF-8"),
                                    null, null);
                        } else if (s2.length == 2 || s2.length == 3) {
                            url = url + "&province=" + s2[0] + "&city=" + s2[1] + "&county=";
                            if (s2.length == 2) {
                                divisionIds = sysAreaService.getIdByCondition(URLEncoder.encode(s2[0], "UTF-8"),
                                        URLEncoder.encode(s2[1], "UTF-8"), null);
                            } else if (s2.length == 3) {
                                divisionIds = sysAreaService.getIdByCondition(URLEncoder.encode(s2[0], "UTF-8"),
                                        URLEncoder.encode(s2[1], "UTF-8"), URLEncoder.encode(s2[2], "UTF-8"));
                            }
                        }
                    } catch (UnsupportedEncodingException e) {
                        e.printStackTrace();
                    }
                    // NOTE(review): a bare null return propagates to the
                    // caller as a null response — consider RestResponse.fail;
                    // left unchanged to preserve the existing contract.
                    if (divisionIds == null) {
                        return null;
                    }
                    // Resolve the boundary geometry at the narrowest level
                    // present in divisionIds ("prov[_city[_county]]").
                    String geomText = null;
                    String[] split = divisionIds.split("_");
                    if (split.length == 1) {
                        geomText = getGeomByCode(areaIdCodeMap.get(split[0]), "1");
                    } else if (split.length == 2) {
                        geomText = getGeomByCode(areaIdCodeMap.get(split[1]), "2");
                    } else {
                        geomText = getGeomByCode(areaIdCodeMap.get(split[2]), "3");
                    }
                    if (PlatformObjectUtils.isEmpty(geomText)) {
                        // Fix: s2.toString() printed the String[]'s identity
                        // hash; join the district parts back instead.
                        return RestResponse.fail("未获取到气候数据的" + String.join("-", s2) + "的空间数据，无法进行计算！");
                    }
                    JSONArray jsonArray = (JSONArray) drillService.getJCSJ(url, 3).getData();
                    if (jsonArray == null) {
                        return RestResponse.fail("该区域没有气候数据");
                    }
                    // Sum average precipitation over all returned records.
                    BigDecimal jyl = BigDecimal.ZERO;
                    for (int i = 0; i < jsonArray.size(); i++) {
                        JSONObject jsonObject = jsonArray.getJSONObject(i);
                        jyl = jyl.add(jsonObject.getBigDecimal("avPrec"));
                    }
                    dzzhJcClimate.setId(UUIDGenerator.getUUID());
                    dzzhJcClimate.setAvPrec(jyl);
                    dzzhJcClimate.setGeomText(geomText);
                    dzzhJcClimate.setTaskId(taskId);
                    eqLandslideRepository.saveDzzhJcClimate(dzzhJcClimate);
                }
            }
            // Refresh the SuperMap workspace dataset so it sees the rows
            // just written.
            // NOTE(review): return value ignored — confirm a failed reload is
            // tolerable here.
            boolean b2 = hypergraphService.getWorkspaceReload("dzzh_jc_climate");
            RestResponse serverToken3 = vectorTif("vectorTif-1", "dzzh_jc_climate", clipLayerName,
                    "av_prec", "task_id='" + taskId + "' "
                    , "task_num='" + taskEntity.getTaskNum() + "' and del_flag='0'", currTime);
            if (serverToken3.getCode() != 200) {
                return RestResponse.fail("该区域没有气候数据");
            }
            String resultLayerName3 = JSON.parseObject(JSON.toJSONString(serverToken3.getData()))
                    .getString("resultLayerName");
            System.out.println("气候:{}" + resultLayerName3 + "解析成功！");

            // Natural density layer.
            RestResponse serverToken4 = vectorTif("vectorTif-1", "dzzh_sys_natural_density", clipLayerName,
                    "dsn_name", "data_version='" + entity.getNatDenVersion() + "' and del_flag='0'"
                    , "task_num='" + taskEntity.getTaskNum() + "' and del_flag='0'", currTime);
            if (serverToken4.getCode() != 200) {
                return RestResponse.fail("该区域没有" + entity.getNatDenVersion() + "版本的天然密度数据");
            }
            String resultLayerName4 = JSON.parseObject(JSON.toJSONString(serverToken4.getData()))
                    .getString("resultLayerName");
            System.out.println(entity.getNatDenVersion() + "版本的天然密度:{}" + resultLayerName4 + "解析成功！");

            // Optional peak-ground-acceleration layers: 2% / 10% / 63%
            // exceedance probability in 50 years; each only when its data
            // version was supplied.
            String resultLayerName5 = "";
            String resultLayerName6 = "";
            String resultLayerName7 = "";
            if (PlatformObjectUtils.isNotEmpty(entity.getA1Version())) {
                RestResponse serverToken5 = vectorTif("vectorTif-2", "jc_pga_002", clipLayerName,
                        "year50_pro_exceedance002", "data_version='" + entity.getA1Version() + "' and del_flag='0'"
                        , "task_num='" + taskEntity.getTaskNum() + "' and del_flag='0'", currTime);
                if (serverToken5.getCode() != 200) {
                    return RestResponse.fail("该区域没有" + entity.getA1Version() + "版本的地震峰值加速度数据");
                }
                resultLayerName5 = JSON.parseObject(JSON.toJSONString(serverToken5.getData()))
                        .getString("resultLayerName");
                System.out.println(entity.getA1Version() + "版本的地震峰值加速度（2%）:{}" + resultLayerName5 + "解析成功！");
            }

            if (PlatformObjectUtils.isNotEmpty(entity.getA2Version())) {
                RestResponse serverToken6 = vectorTif("vectorTif-2", "jc_pga_010", clipLayerName,
                        "year50_pro_exceedance_010", "data_version='" + entity.getA2Version() + "' and del_flag='0'"
                        , "task_num='" + taskEntity.getTaskNum() + "' and del_flag='0'", currTime);
                if (serverToken6.getCode() != 200) {
                    return RestResponse.fail("该区域没有" + entity.getA2Version() + "版本的地震峰值加速度数据");
                }
                resultLayerName6 = JSON.parseObject(JSON.toJSONString(serverToken6.getData()))
                        .getString("resultLayerName");
                System.out.println(entity.getA2Version() + "版本的地震峰值加速度（10%）:{}" + resultLayerName6 + "解析成功！");
            }

            if (PlatformObjectUtils.isNotEmpty(entity.getA3Version())) {
                RestResponse serverToken7 = vectorTif("vectorTif-2", "jc_pga_063", clipLayerName,
                        "year50_pro_exceedance_063", "data_version='" + entity.getA3Version() + "' and del_flag='0'"
                        , "task_num='" + taskEntity.getTaskNum() + "' and del_flag='0'", currTime);
                if (serverToken7.getCode() != 200) {
                    return RestResponse.fail("该区域没有" + entity.getA3Version() + "版本的地震峰值加速度数据");
                }
                resultLayerName7 = JSON.parseObject(JSON.toJSONString(serverToken7.getData()))
                        .getString("resultLayerName");
                System.out.println(entity.getA3Version() + "版本的地震峰值加速度（63%）:{}" + resultLayerName7 + "解析成功！");
            }

            // Bundle everything the slope-version pipeline needs.
            Map<String, Object> map = new HashMap<>();
            map.put("entity", entity);
            map.put("resultLayerName1", resultLayerName1);
            map.put("resultLayerName2", resultLayerName2);
            map.put("resultLayerName3", resultLayerName3);
            map.put("resultLayerName4", resultLayerName4);
            map.put("resultLayerName5", resultLayerName5);
            map.put("resultLayerName6", resultLayerName6);
            map.put("resultLayerName7", resultLayerName7);
            map.put("currTime", currTime);
            map.put("clipLayerName", clipLayerName);

            restResponse = getDataBySlopeVersion1(geom, entity.getSlopeVersion(), taskId,
                    dtmId, "4", map);
            if (restResponse.getCode() != 200) {
                return restResponse;
            }
            // Upsert the task/model association for this run.
            DzzhTaskModel dzzhTaskModel2 = eqLandslideRepository.getDzzhTaskModelByDtmId(dtmId);
            if (PlatformObjectUtils.isEmpty(dzzhTaskModel2)) {
                saveDzzhTaskModel(taskId, dtmId, "2","","");
            } else {
                dzzhTaskModel2.setEvaluateStatus("2");
                dzzhTaskModel2.setResultStatus("2");
                dzzhTaskModel2.setUpdateUser(PlatformSessionUtils.getUserId());
                dzzhTaskModel2.setUpdateTime(PlatformDateUtils.getCurrentTimestamp());
                dzzhTaskModel2.setModelType("2");
                eqLandslideRepository.updateDzzhTaskModel1(dzzhTaskModel2);
            }

            // Record one result row per produced layer; the exceedance tag is
            // the last "_"-separated segment of the layer name.
            JSONArray jsonArray = JSON.parseObject(restResponse.getData().toString()).getJSONArray("resultLayerName");
            for (int i = 0; i < jsonArray.size(); i++) {
                String fileName = jsonArray.getString(i);
                DzzhResultLoessPlateau loessPlateau = new DzzhResultLoessPlateau();
                loessPlateau.setId(UUIDGenerator.getUUID());
                String[] parts = fileName.split("_");
                String s = parts[parts.length - 1];
                loessPlateau.setProExceed(s);
                loessPlateau.setFilePath(fileName);
                loessPlateau.setTaskId(taskId);
                loessPlateau.setModelId(modelId);
                eqLandslideRepository.saveDzzhResultLoessPlateau(loessPlateau);
            }

            restResponse.setMessage("计算成功！");
            return restResponse;
        } catch (Exception e) {
            // NOTE(review): exception swallowed into a generic failure below;
            // printStackTrace kept since no logger field is visible here.
            e.printStackTrace();
        } finally {
            // Only a fully successful run clears the lock and rotates the
            // previous result; failed runs rely on the 10s TTL to unlock.
            if (restResponse != null && restResponse.getCode() == 200) {
                delOldAndSaveNew(dzzhTaskModel, taskId);
                redisTemplate.delete(taskId);
            }

        }
        return RestResponse.fail("计算失败！");
    }

    private RestResponse count4Detail(String resultLayerName1, Map<String, Object> map, String currTime) {
        JSONObject jsonObject = new JSONObject();
        JSONArray jsonArray2 = new JSONArray();
        DzzhModelLoessPlateau entity = (DzzhModelLoessPlateau) map.get("entity");
        String resultLayerName2 = (String) map.get("resultLayerName1");
        String resultLayerName3 = (String) map.get("resultLayerName2");
        String resultLayerName4 = (String) map.get("resultLayerName3");
        String resultLayerName5 = (String) map.get("resultLayerName4");
        String resultLayerName6 = (String) map.get("resultLayerName5");
        String resultLayerName7 = (String) map.get("resultLayerName6");
        String resultLayerName8 = (String) map.get("resultLayerName7");
        String countString1 = "Con([dzzhTifCount." + resultLayerName1 + "]==-9999,0,Con([dzzhTifCount." + resultLayerName1 + "]>=30,6,Con([dzzhTifCount." + resultLayerName1 + "]>=24,5,Con([dzzhTifCount." + resultLayerName1 + "]>=18,4,Con([dzzhTifCount." + resultLayerName1 + "]>=12,3,Con([dzzhTifCount." + resultLayerName1 + "]>=5,2,1))))))";
        //栅格计算
        String resultLayerName_1 = "temp_dzzh黄土高原模型计算_" + currTime + "_" + 1;//坡度
        RestResponse restResponse1 = countTif("countTif2", countString1, resultLayerName_1);
        if (restResponse1.getCode() != 200) {
            return restResponse1;
        }
        //复制到服务器的工作空间上
        RestResponse restResponse11 = coypTif1("coypTif1", resultLayerName_1, resultLayerName_1);
        if (restResponse11.getCode() != 200) {
            return restResponse11;
        }

        String countString2 = "Con([dzzhTifCount." + resultLayerName2 + "]==-9999,0,Con([dzzhTifCount." +
                resultLayerName2 + "]<12,5,Con([dzzhTifCount." + resultLayerName2 + "]==12,3,Con([dzzhTifCount." +
                resultLayerName2 + "]<15,1,0))))";
        String resultLayerName_2 = "temp_dzzh黄土高原模型计算_" + currTime + "_" + 2;//地层时代
        RestResponse restResponse2 = countTif("countTif2", countString2, resultLayerName_2);
        if (restResponse2.getCode() != 200) {
            return restResponse2;
        }
        //复制到服务器的工作空间上
        RestResponse restResponse22 = coypTif1("coypTif1", resultLayerName_2, resultLayerName_2);
        if (restResponse22.getCode() != 200) {
            return restResponse22;
        }

        String countString3 = "Con([dzzhTifCount." + resultLayerName3 + "]==-9999,0,Con([dzzhTifCount." + resultLayerName3 + "]<=3,1,Con([dzzhTifCount." + resultLayerName3 + "]<=7,4,0)))";
        String resultLayerName_3 = "temp_dzzh黄土高原模型计算_" + currTime + "_" + 4;//地貌
        RestResponse restResponse3 = countTif("countTif2", countString3, resultLayerName_3);
        if (restResponse3.getCode() != 200) {
            return restResponse3;
        }
        //复制到服务器的工作空间上
        RestResponse restResponse33 = coypTif1("coypTif1", resultLayerName_3, resultLayerName_3);
        if (restResponse33.getCode() != 200) {
            return restResponse33;
        }

        String countString4 = "Con([dzzhTifCount." + resultLayerName4 + "]==-9999,0,Con([dzzhTifCount." + resultLayerName4 + "]>=600,6,Con([dzzhTifCount." + resultLayerName4 + "]>=500,5,Con([dzzhTifCount." + resultLayerName4 + "]>=400,4,Con([dzzhTifCount." + resultLayerName4 + "]>=300,3,Con([dzzhTifCount." + resultLayerName4 + "]>=200,2,1))))))";
        String resultLayerName_4 = "temp_dzzh黄土高原模型计算_" + currTime + "_" + 3;//降雨量
        RestResponse restResponse4 = countTif("countTif2", countString4, resultLayerName_4);
        if (restResponse4.getCode() != 200) {
            return restResponse4;
        }
        //复制到服务器的工作空间上
        RestResponse restResponse44 = coypTif1("coypTif1", resultLayerName_4, resultLayerName_4);
        if (restResponse44.getCode() != 200) {
            return restResponse44;
        }
        String countString5 = "Con([dzzhTifCount." + resultLayerName5 + "]==-9999,0,Con([dzzhTifCount." + resultLayerName5 + "]>=1.8,1,Con([dzzhTifCount." + resultLayerName5 + "]>=1.7,2,Con([dzzhTifCount." + resultLayerName5 + "]>=1.6,3,Con([dzzhTifCount." + resultLayerName5 + "]>=1.5,4,Con([dzzhTifCount." + resultLayerName5 + "]>=1.4,5,6))))))";
        String resultLayerName_5 = "temp_dzzh黄土高原模型计算_" + currTime + "_" + 5;//天然密度
        RestResponse restResponse5 = countTif("countTif2", countString5, resultLayerName_5);
        if (restResponse5.getCode() != 200) {
            return restResponse5;
        }
        //复制到服务器的工作空间上
        RestResponse restResponse55 = coypTif1("coypTif1", resultLayerName_5, resultLayerName_5);
        if (restResponse55.getCode() != 200) {
            return restResponse55;
        }
        for (int i1 = 1; i1 < 4; i1++) {
            String resultLayerName_7 = "";
            if (i1 == 1) {
                if (PlatformObjectUtils.isEmpty(resultLayerName6)) {
                    continue;
                }
                resultLayerName_7 = resultLayerName6;
            } else if (i1 == 2) {
                if (PlatformObjectUtils.isEmpty(resultLayerName7)) {
                    continue;
                }
                resultLayerName_7 = resultLayerName7;
            } else {
                if (PlatformObjectUtils.isEmpty(resultLayerName8)) {
                    continue;
                }
                resultLayerName_7 = resultLayerName8;
            }
            String countString7 = "Con([dzzhTifCount." + resultLayerName_7 + "]>=0.6,6,Con([dzzhTifCount." + resultLayerName_7 +
                    "]>0.4,5,Con([dzzhTifCount." + resultLayerName_7 + "]>0.25,4,Con([dzzhTifCount." + resultLayerName_7 + "]>0.15,3,Con([dzzhTifCount." + resultLayerName_7 + "]>=0.05,2,1)))))";
            String resultLayerName_a7 = "temp_dzzh黄土高原模型计算_" + currTime + "_a" + i1;//地震动峰值加速度
            RestResponse restResponse_7 = countTif("countTif2", countString7, resultLayerName_a7);
            if (restResponse_7.getCode() != 200) {
                return restResponse_7;
            }
            //复制到服务器的工作空间上
            RestResponse restResponse77 = coypTif1("coypTif1", resultLayerName_a7, resultLayerName_a7);
            if (restResponse77.getCode() != 200) {
                return restResponse77;
            }
            //服务器栅格计算
            String countString6 = "[testdzzhTifCount." + resultLayerName_1 + "]*" + entity.getSlopeNum() +
                    "+[testdzzhTifCount." + resultLayerName_2 + "]*" + entity.getLithologyNum() +
                    "+[testdzzhTifCount." + resultLayerName_3 + "]*" + entity.getPhyNum() +
                    "+[testdzzhTifCount." + resultLayerName_4 + "]*" + entity.getClimateNum() +
                    "+[testdzzhTifCount." + resultLayerName_5 + "]*" + entity.getNatDenNum() +
                    "+[testdzzhTifCount." + resultLayerName_a7 + "]*" + entity.getAvNum();
            String resultLayerName_s = "黄土高原模型_最终计算_" + currTime + "_a" + i1;//裁剪计算结果数据集
            RestResponse restResponse6 = countTif("countTif3", countString6, resultLayerName_s);
            if (restResponse6.getCode() != 200) {
                return restResponse6;
            }
            //结果复制栅格数据集dzzh
            RestResponse restResponse66 = coypTif1("coypTif2", resultLayerName_s, resultLayerName_s);
            if (restResponse66.getCode() != 200) {
                return restResponse66;
            }
            //发布图层
            Map<String, Object> map2 = new HashMap<>();
            map2.put("type", "countMapsTif1");
            //数据集
            map2.put("layerName", resultLayerName_s);
            map2.put("mapsName", "黄土模板-图例.xml");
            RestResponse restResponse7 = hypergraphService.getServerToken(map2);
            if (restResponse7.getCode() != 200) {
                return restResponse7;
            }
            jsonArray2.add(resultLayerName_s);
        }

       /* //服务器栅格计算
        String countString6 = "[testdzzhTifCount." + resultLayerName_1 + "]*" + entity.getSlopeNum() +
                "+[testdzzhTifCount." + resultLayerName_2 + "]*" + entity.getLithologyNum() +
                "+[testdzzhTifCount." + resultLayerName_3 + "]*" + entity.getPhyNum() +
                "+[testdzzhTifCount." + resultLayerName_4 + "]*" + entity.getClimateNum() +
                "+[testdzzhTifCount." + resultLayerName_5 + "]*" + entity.getNatDenNum();
        String resultLayerName_6 = "temp_dzzh黄土高原模型计算_" + currTime + "_" + 6;//裁剪计算结果数据集
        RestResponse restResponse6 = countTif("countTif3", countString6, resultLayerName_6);
        if (restResponse6.getCode() != 200) {
            return restResponse6;
        }
        //结果复制栅格数据集dzzh
        RestResponse restResponse66 = coypTif1("coypTif2", resultLayerName_6, resultLayerName_6);
        if (restResponse66.getCode() != 200) {
            return restResponse66;
        }

        for (int i1 = 1; i1 < 4; i1++) {
            String resultLayerName_7 = "";
            if (i1 == 1) {
                resultLayerName_7 = resultLayerName6;
            } else if (i1 == 2) {
                resultLayerName_7 = resultLayerName7;
            } else {
                resultLayerName_7 = resultLayerName8;
            }
            String countString7 = "Con([dzzhTifCount." + resultLayerName_7 + "]>=0.6,6,Con([dzzhTifCount." + resultLayerName_7 +
                    "]>0.4,5,Con([dzzhTifCount." + resultLayerName_7 + "]>0.25,4,Con([dzzhTifCount." + resultLayerName_7 + "]>0.15,3,Con([dzzhTifCount." + resultLayerName_7 + "]>=0.05,2,1)))))";
            String resultLayerName_a7 = "temp_dzzh黄土高原模型计算_" + currTime + "_a" + i1;//地震动峰值加速度
            RestResponse restResponse_7 = countTif("countTif2", countString7, resultLayerName_a7);
            if (restResponse_7.getCode() != 200) {
                return restResponse_7;
            }
            //栅格计算
            String countString = "[dzzhTifCount." + resultLayerName_6 + "]+[dzzhTifCount." + resultLayerName_a7 + "]*" + entity.getAvNum();
            Map<String, Object> map2 = new HashMap<>();
            map2.put("type", "countMapsTif");
            String resultLayerName = "黄土高原模型_最终计算_" + currTime + "_a" + i1;//裁剪计算结果数据集
            map2.put("layerName", resultLayerName);
            map2.put("mapsName", "黄土模板-图例.xml");
            map2.put("countString", countString);
            RestResponse restResponse7 = hypergraphService.getServerToken(map2);
            if (restResponse7.getCode() != 200) {
                return restResponse7;
            }
            jsonArray2.add(resultLayerName);
        }*/
        jsonObject.put("resultLayerName", jsonArray2);

        return RestResponse.succeed(jsonObject);
    }

    /**
     * Queries the basic-data center for all records of the given data version
     * that intersect the supplied geometry.
     *
     * @param versionCode data version identifier
     * @param geom        WKT/geometry filter passed through to the data center
     * @return the "data" payload of the remote response as a JSON array
     */
    private JSONArray getUrl(String versionCode, String geom) {
        JSONObject params = new JSONObject();
        params.put("versionCode", versionCode);
        params.put("geom", geom);
        return (JSONArray) drillService
                .getJCSJ(basicDataUrl + "/datacenter/sysDataVersion/queryAllByVersionCode", 5, params)
                .getData();
    }

    /**
     * Resolves a geometry (as text) for an administrative division code via the
     * basic-data center. The {@code flag} selects the admin level endpoint:
     * "1" -> jcBoua2, "2" -> jcBoua4, anything else -> jcBoua5.
     *
     * @param code admin division code, sent as the raw POST body
     * @param flag endpoint selector (see above)
     * @return the geometry string from the response "data" field, or null when
     *         the remote body is empty
     * @throws RestClientException if the HTTP call fails (rethrown after logging)
     */
    public String getGeomByCode(String code, String flag) {
        HttpHeaders headers = new HttpHeaders();
        headers.add("Authorization", "dzzhfxpgyqhxt");
        headers.setContentType(MediaType.APPLICATION_JSON);
        HttpEntity<String> entity = new HttpEntity<>(code, headers);
        String url;
        if ("1".equals(flag)) {
            url = basicDataUrl + "/datacenter/jcBoua2/getGeomByCode";
        } else if ("2".equals(flag)) {
            url = basicDataUrl + "/datacenter/jcBoua4/getGeomByCode";
        } else {
            url = basicDataUrl + "/datacenter/jcBoua5/getGeomByCode";
        }
        try {
            JSONObject body = restTemplate.exchange(url, HttpMethod.POST, entity, JSONObject.class).getBody();
            if (body == null) {
                return null;
            }
            // getString converts non-String values safely instead of risking a ClassCastException.
            return body.getString("data");
        } catch (RestClientException e) {
            // BUGFIX: previous message concatenated a stray SLF4J-style "{}" placeholder
            // ("error -> url: {}" + url) and wrote it to stdout; errors belong on stderr.
            System.err.println("error -> url: " + url);
            throw e;
        }
    }

    /**
     * Assigns a fresh UUID primary key to the loess-plateau model entity,
     * persists it, and returns the generated id.
     */
    private String saveDzzhModelLoessPlateau(DzzhModelLoessPlateau entity) {
        String newId = UUIDGenerator.getUUID();
        entity.setId(newId);
        eqLandslideRepository.saveDzzhModelLoessPlateau(entity);
        return newId;
    }

    // Model 3: exponential model (指数模型) — produces one hazard raster per
    // seismic intensity level and persists the result rows for the task.
    @Override
    @Transactional(rollbackFor = Exception.class)
    public RestResponse count2(DzzhModelExponential entity) {
        RestResponse restResponse = null;
        String taskId = entity.getTaskId();
        // Previously-current model result for this task (resultStatus "2"); retired on success.
        DzzhTaskModel dzzhTaskModel = eqLandslideRepository.getDzzhTaskModelByTaskIdAndResultStatus(taskId, "2");
        try {
            TaskEntity taskEntity = taskService.viewTask(taskId);
            String geom = taskEntity.getGeom();
            // A Redis key per task acts as a lock so the same task is not computed concurrently.
            if (PlatformObjectUtils.isNotEmpty(redisTemplate.opsForValue().get(taskId))) {
                return RestResponse.fail("该线程中的该数据:{}" + taskEntity.getTaskName() + "正在被计算！");
            }
            // NOTE(review): the lock expires after 10 seconds even if the computation is
            // still running — confirm this TTL is long enough for real workloads.
            redisTemplate.opsForValue().set(taskId, "3", 10L, TimeUnit.SECONDS);
            String dtmId = UUIDGenerator.getUUID();

            String currTime = System.currentTimeMillis() + "";

            Map<String, Object> map = new HashMap<>();
            map.put("currTime", currTime);
            // Run the raster pipeline for model flavour "2"; result layer names come back in the payload.
            restResponse = getDataBySlopeVersion1(geom, entity.getSlopeVersion(), taskId, dtmId, "2", map);
            if (restResponse.getCode() != 200) {
                return restResponse;
            }

            entity.setDtmId(dtmId + ",");
            String modelId = saveDzzhModelExponential(entity);
            // Create or refresh the task-model link record (modelType "3" = exponential).
            DzzhTaskModel dzzhTaskModel2 = eqLandslideRepository.getDzzhTaskModelByDtmId(dtmId);
            if (PlatformObjectUtils.isEmpty(dzzhTaskModel2)) {
                saveDzzhTaskModel(taskId, dtmId, "3","","");
            } else {
                dzzhTaskModel2.setEvaluateStatus("2");
                dzzhTaskModel2.setResultStatus("2");
                dzzhTaskModel2.setUpdateUser(PlatformSessionUtils.getUserId());
                dzzhTaskModel2.setUpdateTime(PlatformDateUtils.getCurrentTimestamp());
                dzzhTaskModel2.setModelType("3");
                eqLandslideRepository.updateDzzhTaskModel1(dzzhTaskModel2);
            }

            // Persist one result row per produced layer; the seismic intensity is the
            // last "_"-separated token of the layer name.
            JSONArray jsonArray = JSON.parseObject(restResponse.getData().toString()).getJSONArray("resultLayerName");
            for (int i = 0; i < jsonArray.size(); i++) {
                String fileName = jsonArray.getString(i);
                DzzhResultExponential resultExponential = new DzzhResultExponential();
                resultExponential.setId(UUIDGenerator.getUUID());
                resultExponential.setDzIntensity(fileName.split("_")[fileName.split("_").length - 1]);
                resultExponential.setFilePath(fileName);
                resultExponential.setTaskId(taskId);
                resultExponential.setModelId(modelId);
                eqLandslideRepository.saveDzzhResultExponential(resultExponential);
            }
            restResponse.setMessage("计算成功！");
            return restResponse;
        } catch (Exception e) {
            // NOTE(review): the exception is swallowed here, so @Transactional(rollbackFor =
            // Exception.class) never sees it and the transaction will NOT roll back —
            // confirm this is intended before relying on rollback semantics.
            e.printStackTrace();
        } finally {
            // Only on success: retire the previous result set and release the Redis lock
            // (on failure the lock simply expires via its TTL).
            if (restResponse != null && restResponse.getCode() == 200) {
                delOldAndSaveNew(dzzhTaskModel, taskId);
                redisTemplate.delete(taskId);
            }

        }
        return RestResponse.fail("计算失败！");
    }

    /**
     * Builds and publishes one exponential-model raster per seismic intensity
     * level (6..11). For each level a Con(...) map-algebra expression is sent to
     * the hypergraph service: each slope band maps to a*10^b*e^(c*intensity),
     * with -9999 propagated as the no-data value.
     *
     * @param resultLayerName source slope raster dataset name (in dzzhTifCount)
     * @param currTime        timestamp suffix used to make output names unique
     * @return JSON object whose "resultLayerName" array lists the produced layers
     */
    private JSONObject count2Detail(String resultLayerName, String currTime) {
        JSONObject jsonObject = new JSONObject();
        JSONArray jsonArray2 = new JSONArray();
        for (int i1 = 6; i1 < 12; i1++) {
            String countString = "Con([dzzhTifCount." + resultLayerName + "]==-9999,-9999," +
                    "Con([dzzhTifCount." + resultLayerName + "]<1,2*(pow(10,(-10)))*exp(1.5508*" + i1 + ")," +
                    "Con([dzzhTifCount." + resultLayerName + "]<3,2*(pow(10,(-7)))*exp(1.2596*" + i1 + ")," +
                    "Con([dzzhTifCount." + resultLayerName + "]<7,3*(pow(10,(-7)))*exp(1.2683*" + i1 + ")," +
                    "Con([dzzhTifCount." + resultLayerName + "]<10,1*(pow(10,(-6)))*exp(1.1574*" + i1 + ")," +
                    "Con([dzzhTifCount." + resultLayerName + "]<15,2*(pow(10,(-6)))*exp(1.1368*" + i1 + ")," +
                    "Con([dzzhTifCount." + resultLayerName + "]<20,3*(pow(10,(-6)))*exp(1.0869*" + i1 + ")," +
                    "Con([dzzhTifCount." + resultLayerName + "]<25,6*(pow(10,(-6)))*exp(1.0558*" + i1 + ")," +
                    "Con([dzzhTifCount." + resultLayerName + "]<30,1*(pow(10,(-5)))*exp(1.013*" + i1 + ")," +
                    "Con([dzzhTifCount." + resultLayerName + "]<35,2*(pow(10,(-5)))*exp(0.9709*" + i1 + ")," +
                    "Con([dzzhTifCount." + resultLayerName + "]<40,4*(pow(10,(-5)))*exp(0.9081*" + i1 + ")," +
                    "Con([dzzhTifCount." + resultLayerName + "]<45,6*(pow(10,(-5)))*exp(0.8946*" + i1 + ")," +
                    "Con([dzzhTifCount." + resultLayerName + "]<50,9*(pow(10,(-5)))*exp(0.8621*" + i1 + ")," +
                    "3*(pow(10,(-5)))*exp(0.9743*" + i1 + "))))))))))))))";
            // Raster calculation + map publication on the hypergraph server.
            Map<String, Object> map2 = new HashMap<>();
            map2.put("type", "countMapsTif");
            String resultLayerName2 = "指数模型_计算_" + currTime + "_" + i1;// clipped result dataset name
            map2.put("layerName", resultLayerName2);
            map2.put("mapsName", "指数-逻辑模板-图例.xml");
            map2.put("countString", countString);
            // NOTE(review): unlike other call sites, the RestResponse from getServerToken
            // is not checked here — a failed raster job would go unnoticed. Confirm intent.
            hypergraphService.getServerToken(map2);
            jsonArray2.add(resultLayerName2);
        }
        jsonObject.put("resultLayerName", jsonArray2);
        return jsonObject;
    }

    /**
     * Assigns a fresh UUID primary key to the exponential model entity,
     * persists it, and returns the generated id.
     */
    private String saveDzzhModelExponential(DzzhModelExponential entity) {
        String newId = UUIDGenerator.getUUID();
        entity.setId(newId);
        eqLandslideRepository.saveDzzhModelExponential(entity);
        return newId;
    }

    // Model 4: logistic (Logistic Growth) model (逻辑斯蒂模型) — produces one
    // hazard raster per seismic intensity level and persists the result rows.
    @Override
    @Transactional(rollbackFor = Exception.class)
    public RestResponse count3(DzzhModelLogisticGrowth entity) {
        RestResponse restResponse = null;
        String taskId = entity.getTaskId();
        // Previously-current model result for this task (resultStatus "2"); retired on success.
        DzzhTaskModel dzzhTaskModel = eqLandslideRepository.getDzzhTaskModelByTaskIdAndResultStatus(taskId, "2");
        try {
            TaskEntity taskEntity = taskService.viewTask(taskId);
            String geom = taskEntity.getGeom();
            // A Redis key per task acts as a lock so the same task is not computed concurrently.
            if (PlatformObjectUtils.isNotEmpty(redisTemplate.opsForValue().get(taskId))) {
                return RestResponse.fail("该线程中的该数据:{}" + taskEntity.getTaskName() + "正在被计算！");
            }
            // NOTE(review): the lock expires after 10 seconds even if the computation is
            // still running — confirm this TTL is long enough for real workloads.
            redisTemplate.opsForValue().set(taskId, "4", 10L, TimeUnit.SECONDS);
            String dtmId = UUIDGenerator.getUUID();

            String currTime = System.currentTimeMillis() + "";

            Map<String, Object> map = new HashMap<>();
            map.put("currTime", currTime);
            // Run the raster pipeline for model flavour "3"; result layer names come back in the payload.
            restResponse = getDataBySlopeVersion1(geom, entity.getSlopeVersion(), taskId, dtmId, "3", map);
            if (restResponse.getCode() != 200) {
                return restResponse;
            }

            entity.setDtmId(dtmId + ",");
            String modelId = saveDzzhModelLogisticGrowth(entity);
            // Create or refresh the task-model link record (modelType "4" = logistic).
            DzzhTaskModel dzzhTaskModel2 = eqLandslideRepository.getDzzhTaskModelByDtmId(dtmId);
            if (PlatformObjectUtils.isEmpty(dzzhTaskModel2)) {
                saveDzzhTaskModel(taskId, dtmId, "4","","");
            } else {
                dzzhTaskModel2.setEvaluateStatus("2");
                dzzhTaskModel2.setResultStatus("2");
                dzzhTaskModel2.setUpdateUser(PlatformSessionUtils.getUserId());
                dzzhTaskModel2.setUpdateTime(PlatformDateUtils.getCurrentTimestamp());
                dzzhTaskModel2.setModelType("4");
                eqLandslideRepository.updateDzzhTaskModel1(dzzhTaskModel2);
            }

            // Persist one result row per produced layer; the seismic intensity is the
            // last "_"-separated token of the layer name.
            JSONArray jsonArray = JSON.parseObject(restResponse.getData().toString()).getJSONArray("resultLayerName");
            for (int i = 0; i < jsonArray.size(); i++) {
                String fileName = jsonArray.getString(i);
                DzzhResultLogisticGrowth logisticGrowth = new DzzhResultLogisticGrowth();
                logisticGrowth.setId(UUIDGenerator.getUUID());
                logisticGrowth.setDzIntensity(fileName.split("_")[fileName.split("_").length - 1]);
                logisticGrowth.setFilePath(fileName);
                logisticGrowth.setTaskId(taskId);
                logisticGrowth.setModelId(modelId);
                // dpFlag "0" marks results belonging to large-screen ("大屏展示") display tasks.
                if (taskEntity.getTaskName().contains("大屏展示")) {
                    logisticGrowth.setDpFlag("0");
                } else {
                    logisticGrowth.setDpFlag("1");
                }
                eqLandslideRepository.saveDzzhResultLogisticGrowth(logisticGrowth);
            }
            restResponse.setMessage("计算成功！");
            return restResponse;
        } catch (Exception e) {
            // NOTE(review): the exception is swallowed here, so @Transactional(rollbackFor =
            // Exception.class) never sees it and the transaction will NOT roll back —
            // confirm this is intended before relying on rollback semantics.
            e.printStackTrace();
        } finally {
            // Only on success: retire the previous result set and release the Redis lock
            // (on failure the lock simply expires via its TTL).
            if (restResponse != null && restResponse.getCode() == 200) {
                delOldAndSaveNew(dzzhTaskModel, taskId);
                redisTemplate.delete(taskId);
            }

        }
        return RestResponse.fail("计算失败！");
    }

    /**
     * Builds and publishes one logistic-model raster per seismic intensity
     * level (6..11). For each level a Con(...) map-algebra expression is sent to
     * the hypergraph service: each slope band maps to the logistic curve
     * 1/(1+exp(a - b*intensity)) with band-specific coefficients, and -9999 is
     * propagated as the no-data value.
     *
     * <p>Removed ~54 lines of dead commented-out BigDecimal code that duplicated
     * this piecewise logic in Java; it was unreachable and unmaintained.
     *
     * @param resultLayerName source slope raster dataset name (in dzzhTifCount)
     * @param currTime        timestamp suffix used to make output names unique
     * @return JSON object whose "resultLayerName" array lists the produced layers
     */
    private JSONObject count3Detail(String resultLayerName, String currTime) {
        JSONObject jsonObject = new JSONObject();
        JSONArray jsonArray3 = new JSONArray();
        for (int i1 = 6; i1 < 12; i1++) {
            String countString = "Con([dzzhTifCount." + resultLayerName + "]==-9999,-9999," +
                    "Con([dzzhTifCount." + resultLayerName + "]<1,1/(1+exp(8.22178-(0.34329*" + i1 + ")))," +
                    "Con([dzzhTifCount." + resultLayerName + "]<3,1/(1+exp(9.27905-(0.63448*" + i1 + ")))," +
                    "Con([dzzhTifCount." + resultLayerName + "]<7,1/(1+exp(11.3983-(0.916239*" + i1 + ")))," +
                    "Con([dzzhTifCount." + resultLayerName + "]<10,1/(1+exp(10.6025-(0.849128*" + i1 + ")))," +
                    "Con([dzzhTifCount." + resultLayerName + "]<15,1/(1+exp(10.3517-(0.852482*" + i1 + ")))," +
                    "Con([dzzhTifCount." + resultLayerName + "]<20,1/(1+exp(10.0078-(0.847624*" + i1 + ")))," +
                    "Con([dzzhTifCount." + resultLayerName + "]<25,1/(1+exp(9.19721-(0.789875*" + i1 + ")))," +
                    "Con([dzzhTifCount." + resultLayerName + "]<30,1/(1+exp(9.68142-(0.852073*" + i1 + ")))," +
                    "Con([dzzhTifCount." + resultLayerName + "]<35,1/(1+exp(9.65106-(0.8847*" + i1 + ")))," +
                    "Con([dzzhTifCount." + resultLayerName + "]<40,1/(1+exp(10.1154-(0.968573*" + i1 + ")))," +
                    "Con([dzzhTifCount." + resultLayerName + "]<45,1/(1+exp(10.1773-(1.012792*" + i1 + ")))," +
                    "Con([dzzhTifCount." + resultLayerName + "]<50,1/(1+exp(9.89683-(1.007739*" + i1 + ")))," +
                    "1/(1+exp(9.80003-(0.989653*" + i1 + "))))))))))))))))";
            // Raster calculation + map publication on the hypergraph server.
            Map<String, Object> map3 = new HashMap<>();
            map3.put("type", "countMapsTif");
            String resultLayerName3 = "逻辑斯蒂模型_计算_" + currTime + "_" + i1;// clipped result dataset name
            map3.put("layerName", resultLayerName3);
            map3.put("mapsName", "指数-逻辑模板-图例.xml");
            map3.put("countString", countString);
            // NOTE(review): the RestResponse from getServerToken is not checked here,
            // unlike other call sites — a failed raster job would go unnoticed.
            hypergraphService.getServerToken(map3);
            jsonArray3.add(resultLayerName3);
        }
        jsonObject.put("resultLayerName", jsonArray3);
        return jsonObject;
    }

    /**
     * Assigns a fresh UUID primary key to the logistic-growth model entity,
     * persists it, and returns the generated id.
     */
    private String saveDzzhModelLogisticGrowth(DzzhModelLogisticGrowth entity) {
        String newId = UUIDGenerator.getUUID();
        entity.setId(newId);
        eqLandslideRepository.saveDzzhModelLogisticGrowth(entity);
        return newId;
    }

    /**
     * Fetches PGA (peak ground acceleration) data for the given exceedance
     * probability and data version, returned as a fastjson object.
     */
    private JSONObject getSetRedisData(String probability, String version) {
        RestResponse response = basicLandformService.queryAllByVersionCode("pga", probability, version, "", null);
        return (JSONObject) JSON.toJSON(response.getData());
    }

    // Model 5: Newmark model — rasterizes lithology and PGA inputs, runs the
    // Newmark displacement pipeline, and persists one result row per exceedance
    // probability (2%/10%/63%).
    @Override
    @Transactional(rollbackFor = Exception.class)
    public RestResponse count5(DzzhModelNewmark entity) {
        RestResponse restResponse = null;
        String taskId = entity.getTaskId();
        // Previously-current model result for this task (resultStatus "2"); retired on success.
        DzzhTaskModel dzzhTaskModel = eqLandslideRepository.getDzzhTaskModelByTaskIdAndResultStatus(taskId, "2");
        try {
            TaskEntity taskEntity = taskService.viewTask(taskId);
            String geom = taskEntity.getGeom();
            // A Redis key per task acts as a lock so the same task is not computed concurrently;
            // the 10-second TTL is the implicit release on failure.
            if (PlatformObjectUtils.isNotEmpty(redisTemplate.opsForValue().get(taskId))) {
                return RestResponse.fail("该线程中的该数据:{}" + taskEntity.getTaskName() + "正在被计算！");
            }
            redisTemplate.opsForValue().set(taskId, "5", 10L, TimeUnit.SECONDS);
            String dtmId = entity.getDtmId();
            if (PlatformObjectUtils.isEmpty(dtmId)) {
                dtmId = UUIDGenerator.getUUID();
            }

            // Lithology group parameters, encoded as "cohesion_viscosityAngle_rockSoilWeight"
            // keyed by group name; consumed later by the raster expressions.
            List<LithologyGroup> groupList = entity.getGroupList();
            Map<String, String> lithologyMap = new HashMap<>(groupList.size());
            if (PlatformObjectUtils.isNotEmpty(groupList)) {
                for (LithologyGroup lithologyGroup : groupList) {
                    lithologyMap.put(lithologyGroup.getGroupName(), lithologyGroup.getCohesion() + "_" + lithologyGroup.getViscosityAngle() + "_" + lithologyGroup.getRockSoilWeight());
                }
            }

            // Extract the task polygon into a temp vector layer on the raster workspace.
            String currTime = System.currentTimeMillis() + "";
            String clipLayerName = "temp_dzzh任务_" + currTime;
            String sqlFilter = "dt_id='" + taskId + "'";
            Map<String, Object> map0 = new HashMap<>();
            map0.put("type", "dzzhVectorQuery");
            map0.put("layerName", "dzzh_task");
            map0.put("resultLayerName", clipLayerName);
            map0.put("sqlFilter", sqlFilter);
            map0.put("queryType", "fxfzdzzh");
            RestResponse serverToken0 = hypergraphService.getServerToken(map0);
            if (serverToken0.getCode() != 200) {
                return RestResponse.fail(503, "计算失败！");
            }
            // Accumulates intermediate layer names and inputs for getDataBySlopeVersion1.
            Map<String, Object> map1 = new HashMap<>();
            eqLandslideRepository.deleteByVersion(taskId,entity.getLithologyVersion(),"2");
            // Stratum lithology raster, clipped to the task polygon.
            RestResponse serverToken2 = vectorTif("vectorTif-1", "dzzh_lithology_merge_result", clipLayerName,
                    "values1", "lithology_version_code='" + entity.getLithologyVersion() + "' and task_id='" + taskId + "' and del_flag='0'"
                    , "task_num='" + taskEntity.getTaskNum() + "' and del_flag='0'", currTime);
            if (serverToken2.getCode() != 200) {
                return RestResponse.fail("该区域没有" + entity.getLithologyVersion() + "版本的地层岩性数据");
            }
            String resultLayerName2 = JSON.parseObject(JSON.toJSONString(serverToken2.getData()))
                    .getString("resultLayerName");

            // Seismic PGA, 2% exceedance in 50 years (optional input).
            if (PlatformObjectUtils.isNotEmpty(entity.getA1Version())) {
                RestResponse serverToken5 = vectorTif("vectorTif-2", "jc_pga_002", clipLayerName,
                        "year50_pro_exceedance002", "data_version='" + entity.getA1Version() + "' and del_flag='0'"
                        , "task_num='" + taskEntity.getTaskNum() + "' and del_flag='0'", currTime);
                if (serverToken5.getCode() != 200) {
                    return RestResponse.fail("该区域没有" + entity.getA1Version() + "版本的地震峰值加速度数据");
                }
                String resultLayerName5 = JSON.parseObject(JSON.toJSONString(serverToken5.getData()))
                        .getString("resultLayerName");
                map1.put("resultLayerName5", resultLayerName5);
                System.out.println(entity.getA1Version() + "版本的地震峰值加速度（2%）:{}" + resultLayerName5 + "解析成功！");
            } else {
                map1.put("resultLayerName5", "");
            }

            // Seismic PGA, 10% exceedance in 50 years (optional input).
            if (PlatformObjectUtils.isNotEmpty(entity.getA2Version())) {
                RestResponse serverToken6 = vectorTif("vectorTif-2", "jc_pga_010", clipLayerName,
                        "year50_pro_exceedance_010", "data_version='" + entity.getA2Version() + "' and del_flag='0'"
                        , "task_num='" + taskEntity.getTaskNum() + "' and del_flag='0'", currTime);
                if (serverToken6.getCode() != 200) {
                    return RestResponse.fail("该区域没有" + entity.getA2Version() + "版本的地震峰值加速度数据");
                }
                String resultLayerName6 = JSON.parseObject(JSON.toJSONString(serverToken6.getData()))
                        .getString("resultLayerName");
                map1.put("resultLayerName6", resultLayerName6);
                System.out.println(entity.getA2Version() + "版本的地震峰值加速度（10%）:{}" + resultLayerName6 + "解析成功！");
            } else {
                map1.put("resultLayerName6", "");
            }

            // Seismic PGA, 63% exceedance in 50 years (optional input).
            if (PlatformObjectUtils.isNotEmpty(entity.getA3Version())) {
                RestResponse serverToken7 = vectorTif("vectorTif-2", "jc_pga_063", clipLayerName,
                        "year50_pro_exceedance_063", "data_version='" + entity.getA3Version() + "' and del_flag='0'"
                        , "task_num='" + taskEntity.getTaskNum() + "' and del_flag='0'", currTime);
                if (serverToken7.getCode() != 200) {
                    return RestResponse.fail("该区域没有" + entity.getA3Version() + "版本的地震峰值加速度数据");
                }
                String resultLayerName7 = JSON.parseObject(JSON.toJSONString(serverToken7.getData()))
                        .getString("resultLayerName");
                map1.put("resultLayerName7", resultLayerName7);
                System.out.println(entity.getA3Version() + "版本的地震峰值加速度（63%）:{}" + resultLayerName7 + "解析成功！");
            } else {
                map1.put("resultLayerName7", "");
            }
            map1.put("currTime", currTime);
            map1.put("entity", entity);
            map1.put("lithologyMap", lithologyMap);
            map1.put("resultLayerName2", resultLayerName2);

            // Run the Newmark raster pipeline (model flavour "5").
            restResponse = getDataBySlopeVersion1(geom, entity.getSlopeVersion(), taskId, dtmId, "5", map1);
            if (restResponse.getCode() != 200) {
                return restResponse;
            }

            entity.setDtmId(dtmId + ",");
            String modelId = saveDzzhModelNewmark(entity);
            // Snapshot the lithology group parameters used for this run.
            List<DzzhResultNewmarkLithologyGroup> lgList = new ArrayList<>();
            if (PlatformObjectUtils.isNotEmpty(groupList)) {
                String finalDtmId = dtmId;
                groupList.forEach(domain -> {
                    DzzhResultNewmarkLithologyGroup group = new DzzhResultNewmarkLithologyGroup();
                    group.setDrnlgId(UUIDGenerator.getUUID());
                    group.setDtmId(finalDtmId);
                    group.setGroupName(domain.getGroupName());
                    group.setCohesion(domain.getCohesion());
                    group.setViscosityAngle(domain.getViscosityAngle());
                    group.setRockSoilWeight(domain.getRockSoilWeight());
                    lgList.add(group);
                });
                saveDzzhResultNewmarkLithologyGroup(lgList);
            }
            // Create or refresh the task-model link record (modelType "5" = Newmark).
            DzzhTaskModel dzzhTaskModel2 = eqLandslideRepository.getDzzhTaskModelByDtmId(dtmId);
            if (PlatformObjectUtils.isEmpty(dzzhTaskModel2)) {
                saveDzzhTaskModel(taskId, dtmId, "5",entity.getAttachId(),entity.getLithology());
            } else {
                dzzhTaskModel2.setEvaluateStatus("2");
                dzzhTaskModel2.setResultStatus("2");
                dzzhTaskModel2.setUpdateUser(PlatformSessionUtils.getUserId());
                dzzhTaskModel2.setUpdateTime(PlatformDateUtils.getCurrentTimestamp());
                dzzhTaskModel2.setModelType("5");
                // Overwrite the attachment id only when a new one was supplied
                // (the old else branch was a no-op self-assignment and was removed).
                if (StringUtils.isNotBlank(entity.getAttachId())) {
                    dzzhTaskModel2.setAttachId(entity.getAttachId());
                }
                // BUGFIX: previously this called setAttachId(entity.getLithology()),
                // clobbering the attachment id with the lithology value (and, when blank,
                // re-assigning attachId from getLithology()). Lithology belongs in its own
                // field — assumes DzzhTaskModel has setLithology matching the
                // getLithology() accessor used elsewhere; TODO confirm.
                if (StringUtils.isNotBlank(entity.getLithology())) {
                    dzzhTaskModel2.setLithology(entity.getLithology());
                }
                eqLandslideRepository.updateDzzhTaskModel1(dzzhTaskModel2);
            }

            // Persist one result row per produced layer; the exceedance probability is
            // the last "_"-separated token of the layer name.
            JSONArray jsonArray = JSON.parseObject(restResponse.getData().toString()).getJSONArray("resultLayerName");
            for (int i = 0; i < jsonArray.size(); i++) {
                String fileName = jsonArray.getString(i);
                DzzhResultNewmark newmark = new DzzhResultNewmark();
                newmark.setId(UUIDGenerator.getUUID());
                String s = fileName.split("_")[fileName.split("_").length - 1];
                newmark.setProExceed(s);
                newmark.setFilePath(fileName);
                newmark.setTaskId(taskId);
                newmark.setModelId(modelId);
                eqLandslideRepository.saveDzzhResultNewmark(newmark);
            }
            restResponse.setMessage("计算成功！");
            return restResponse;
        } catch (Exception e) {
            // NOTE(review): swallowing the exception prevents @Transactional rollback
            // and hides the root cause; route through a logger when one is available.
            e.printStackTrace();
        } finally {
            // Only on success: retire the previous result set and release the Redis lock
            // (on failure the lock simply expires via its TTL).
            if (restResponse != null && restResponse.getCode() == 200) {
                delOldAndSaveNew(dzzhTaskModel, taskId);
                redisTemplate.delete(taskId);
            }

        }
        return RestResponse.fail("计算失败！");
    }

    /**
     * Runs the NEWMARK-model raster-calculation chain (model tab "5").
     * <p>
     * Builds a sequence of SuperMap-style {@code Con(...)} raster-algebra expressions over
     * intermediate datasets named "NEWMARK模型_过程计算_&lt;currTime&gt;_N", executes each via
     * {@link #countTif}, then produces up to three final layers (one per available PGA/PGV
     * input layer 5/6/7) and deletes the intermediates.
     *
     * @param resultLayerName1 name of the clipped slope raster prepared by the caller
     *                         (see getDataBySlopeVersion1)
     * @param map              parameter bag: "entity" (DzzhModelNewmark), "lithologyMap"
     *                         (group name -> "_"-joined value string; parts [0],[1],[2] are
     *                         consumed below — assumes at least three parts per entry, TODO confirm),
     *                         and layer names "resultLayerName2/5/6/7"
     * @param currTime         timestamp suffix used to namespace all intermediate datasets
     * @return succeed() whose payload carries "resultLayerName" (array of final layer names),
     *         or the first failed hypergraph response
     */
    private RestResponse count5Detail(String resultLayerName1, Map<String, Object> map, String currTime) {
        JSONObject jsonObject = new JSONObject();
        JSONArray jsonArray2 = new JSONArray();
        DzzhModelNewmark entity = (DzzhModelNewmark) map.get("entity");
        Map<String, String> lithologyMap = (Map<String, String>) map.get("lithologyMap");
        String resultLayerName2 = (String) map.get("resultLayerName2");
        String resultLayerName5 = (String) map.get("resultLayerName5");
        String resultLayerName6 = (String) map.get("resultLayerName6");
        String resultLayerName7 = (String) map.get("resultLayerName7");

        // Step 1: map lithology-class codes in layer 2 to the first "_"-part of each
        // class value (missing classes fall back to 0).
        String countString1_1 = "Con([dzzhTifCount." + resultLayerName2 + "]==2," + (lithologyMap.get("较硬岩类") == null ? 0 : lithologyMap.get("较硬岩类").split("_")[0]) +
                ",Con([dzzhTifCount." + resultLayerName2 + "]==4," + (lithologyMap.get("较软岩类") == null ? 0 : lithologyMap.get("较软岩类").split("_")[0]) +
                ",Con([dzzhTifCount." + resultLayerName2 + "]==7," + (lithologyMap.get("软岩类") == null ? 0 : lithologyMap.get("软岩类").split("_")[0]) +
                ",Con([dzzhTifCount." + resultLayerName2 + "]==1," + (lithologyMap.get("坚硬岩类") == null ? 0 : lithologyMap.get("坚硬岩类").split("_")[0]) +
                ",Con([dzzhTifCount." + resultLayerName2 + "]==10," + (lithologyMap.get("极软岩类") == null ? 0 : lithologyMap.get("极软岩类").split("_")[0]) +
                ",0)))))";
        String resultLayerName8_1 = "NEWMARK模型_过程计算_" + currTime + "_1";// intermediate raster dataset
        RestResponse restResponse1 = countTif("countTif2", countString1_1, resultLayerName8_1);
        if (restResponse1.getCode() != 200) {
            return restResponse1;
        }
        // Step 2: same mapping with the second "_"-part of each class value.
        String countString1_2 = "Con([dzzhTifCount." + resultLayerName2 + "]==2," + (lithologyMap.get("较硬岩类") == null ? 0 : lithologyMap.get("较硬岩类").split("_")[1]) +
                ",Con([dzzhTifCount." + resultLayerName2 + "]==4," + (lithologyMap.get("较软岩类") == null ? 0 : lithologyMap.get("较软岩类").split("_")[1]) +
                ",Con([dzzhTifCount." + resultLayerName2 + "]==7," + (lithologyMap.get("软岩类") == null ? 0 : lithologyMap.get("软岩类").split("_")[1]) +
                ",Con([dzzhTifCount." + resultLayerName2 + "]==1," + (lithologyMap.get("坚硬岩类") == null ? 0 : lithologyMap.get("坚硬岩类").split("_")[1]) +
                ",Con([dzzhTifCount." + resultLayerName2 + "]==10," + (lithologyMap.get("极软岩类") == null ? 0 : lithologyMap.get("极软岩类").split("_")[1]) +
                ",0)))))";
        String resultLayerName8_2 = "NEWMARK模型_过程计算_" + currTime + "_2";// intermediate raster dataset
        RestResponse restResponse2 = countTif("countTif2", countString1_2, resultLayerName8_2);
        if (restResponse2.getCode() != 200) {
            return restResponse2;
        }
        // Step 3: bucket the slope raster (layer 1) into classes 0/5/4/3/2 by degree ranges.
        String countString1_3 = "Con([dzzhTifCount." + resultLayerName1 + "]<10,0,Con([dzzhTifCount." + resultLayerName1 + "]<30,5,Con([dzzhTifCount." + resultLayerName1 + "]<45,4," +
                "Con([dzzhTifCount." + resultLayerName1 + "]<60,3,Con([dzzhTifCount." + resultLayerName1 + "]>=60,2,0)))))";
        String resultLayerName8_3 = "NEWMARK模型_过程计算_" + currTime + "_3";// intermediate raster dataset
        RestResponse restResponse3 = countTif("countTif2", countString1_3, resultLayerName8_3);
        if (restResponse3.getCode() != 200) {
            return restResponse3;
        }
        // Step 4: layer8_1 / (layer8_2 * layer8_3 * sin(slope)), with zero-guards on every
        // divisor term and on slopes below 10 degrees (Math.PI/180 converts degrees to radians).
        String countString1_4 = "Con([dzzhTifCount." + resultLayerName8_1 + "]==0,0,Con([dzzhTifCount." + resultLayerName8_2 + "]==0,0," +
                "Con([dzzhTifCount." + resultLayerName8_3 + "]==0,0,Con(sin(" + Math.PI + "/180*[dzzhTifCount." + resultLayerName1 + "])==0,0," +
                "Con([dzzhTifCount." + resultLayerName1 + "]<10,0,[dzzhTifCount." + resultLayerName8_1 + "]/([dzzhTifCount." + resultLayerName8_2 +
                "]*[dzzhTifCount." + resultLayerName8_3 + "]*sin(" + Math.PI + "/180*[dzzhTifCount." + resultLayerName1 + "])))))))";
        String resultLayerName8_4 = "NEWMARK模型_过程计算_" + currTime + "_4";// intermediate raster dataset
        RestResponse restResponse4 = countTif("countTif2", countString1_4, resultLayerName8_4);
        if (restResponse4.getCode() != 200) {
            return restResponse4;
        }
        // Step 5: lithology mapping again with the third "_"-part (friction-angle-like term).
        String countString1_5 = "Con([dzzhTifCount." + resultLayerName2 + "]==2," + (lithologyMap.get("较硬岩类") == null ? 0 : lithologyMap.get("较硬岩类").split("_")[2]) +
                ",Con([dzzhTifCount." + resultLayerName2 + "]==4," + (lithologyMap.get("较软岩类") == null ? 0 : lithologyMap.get("较软岩类").split("_")[2]) +
                ",Con([dzzhTifCount." + resultLayerName2 + "]==7," + (lithologyMap.get("软岩类") == null ? 0 : lithologyMap.get("软岩类").split("_")[2]) +
                ",Con([dzzhTifCount." + resultLayerName2 + "]==1," + (lithologyMap.get("坚硬岩类") == null ? 0 : lithologyMap.get("坚硬岩类").split("_")[2]) +
                ",Con([dzzhTifCount." + resultLayerName2 + "]==10," + (lithologyMap.get("极软岩类") == null ? 0 : lithologyMap.get("极软岩类").split("_")[2]) +
                ",0)))))";
        String resultLayerName8_5 = "NEWMARK模型_过程计算_" + currTime + "_5";// intermediate raster dataset
        RestResponse restResponse5 = countTif("countTif2", countString1_5, resultLayerName8_5);
        if (restResponse5.getCode() != 200) {
            return restResponse5;
        }
        // Step 6: tan(layer8_5 in radians) / tan(slope in radians), zero-guarded.
        String countString1_6 = "Con([dzzhTifCount." + resultLayerName8_5 + "]==0,0,Con([dzzhTifCount." + resultLayerName1 + "]<10,0," +
                "Con(tan(" + Math.PI + "/180*[dzzhTifCount." + resultLayerName1 + "])==0,0," +
                "tan(" + Math.PI + "/180*[dzzhTifCount." + resultLayerName8_5 + "])/tan(" + Math.PI + "/180*[dzzhTifCount." + resultLayerName1 + "]))))";
        String resultLayerName8_6 = "NEWMARK模型_过程计算_" + currTime + "_6";// intermediate raster dataset
        RestResponse restResponse6 = countTif("countTif2", countString1_6, resultLayerName8_6);
        if (restResponse6.getCode() != 200) {
            return restResponse6;
        }
        // Step 7: rockSoilChroma * 9.8 * tan(...) term.
        // NOTE(review): unlike step 6, the tan( here wraps the WHOLE quotient
        // "pi/180*layer8_5 / (layer8_2 * tan(pi/180*slope))" rather than closing after
        // layer8_5 — confirm against the NEWMARK formula whether a ")" is misplaced.
        String countString1_7 = "Con([dzzhTifCount." + resultLayerName8_5 + "]==0,0,Con([dzzhTifCount." + resultLayerName8_2 + "]==0,0," +
                "Con(tan(" + Math.PI + "/180*[dzzhTifCount." + resultLayerName1 + "])==0,0,Con([dzzhTifCount." + resultLayerName1 + "]<10,0," +
                "" + entity.getRockSoilChroma() + "*9.8*tan(" + Math.PI + "/180*[dzzhTifCount." + resultLayerName8_5 + "]/" +
                "([dzzhTifCount." + resultLayerName8_2 + "]*tan(" + Math.PI + "/180*[dzzhTifCount." + resultLayerName1 + "])))))))";
        String resultLayerName8_7 = "NEWMARK模型_过程计算_" + currTime + "_7";// intermediate raster dataset
        RestResponse restResponse7 = countTif("countTif2", countString1_7, resultLayerName8_7);
        if (restResponse7.getCode() != 200) {
            return restResponse7;
        }
        // Fs — static factor of safety: step4 + step6 - step7.
        String countString1_8 = "([dzzhTifCount." + resultLayerName8_4 + "]+[dzzhTifCount." + resultLayerName8_6 + "]-[dzzhTifCount." + resultLayerName8_7 + "])";
        String resultLayerName8_8 = "NEWMARK模型_过程计算_" + currTime + "_8";// intermediate raster dataset
        RestResponse restResponse8 = countTif("countTif2", countString1_8, resultLayerName8_8);
        if (restResponse8.getCode() != 200) {
            return restResponse8;
        }
        // ac — critical acceleration: (Fs-1)*9.8*sin(slope), with Fs clamped to 1.01 when <= 1.
        String countString1_9 = "Con([dzzhTifCount." + resultLayerName1 + "]<10,0,Con([dzzhTifCount." + resultLayerName8_8 +
                "]<=1,(1.01-1)*9.8*sin(" + Math.PI + "/180*[dzzhTifCount." + resultLayerName1 + "]),(([dzzhTifCount." +
                resultLayerName8_8 + "])-1)*9.8*sin(" + Math.PI + "/180*[dzzhTifCount." + resultLayerName1 + "])))";
        String resultLayerName8_9 = "NEWMARK模型_过程计算_" + currTime + "_9";// intermediate raster dataset
        RestResponse restResponse9 = countTif("countTif2", countString1_9, resultLayerName8_9);
        if (restResponse9.getCode() != 200) {
            return restResponse9;
        }
        // jsonArray2.add(resultLayerName8);
        // Final pass: one output layer per available ground-motion input (layers 5, 6, 7),
        // using the entity's ratio1..ratio3 coefficients; results are published with a legend template.
        for (int i1 = 1; i1 < 4; i1++) {
            String resultLayerName_5 = "";
            if (i1 == 1 && PlatformObjectUtils.isNotEmpty(resultLayerName5)) {
                resultLayerName_5 = resultLayerName5;
            } else if (i1 == 2 && PlatformObjectUtils.isNotEmpty(resultLayerName6)) {
                resultLayerName_5 = resultLayerName6;
            } else if (i1 == 3 && PlatformObjectUtils.isNotEmpty(resultLayerName7)) {
                resultLayerName_5 = resultLayerName7;
            }
            if (PlatformObjectUtils.isNotEmpty(resultLayerName_5)) {
                String countString = "Con([dzzhTifCount." + resultLayerName1 + "]<10,0.01,Con([dzzhTifCount." + resultLayerName8_9 +
                        "]/([dzzhTifCount." + resultLayerName_5 + "]*9.8)>=1,0.01,0.272*(1-(exp(-0.13*pow(" + entity.getRatio1() +
                        "+log(pow(1-([dzzhTifCount." + resultLayerName8_9 + "]/([dzzhTifCount." + resultLayerName_5 + "]*9.8))," +
                        entity.getRatio2() + ")*" + "pow([dzzhTifCount." + resultLayerName8_9 + "]/([dzzhTifCount." + resultLayerName_5 +
                        "]*9.8)," + entity.getRatio3() + ")),0.908))))))";
                // Raster calculation with map publishing (legend template attached).
                Map<String, Object> map2 = new HashMap<>();
                map2.put("type", "countMapsTif");
                String resultLayerName = "NEWMARK模型_最终计算_" + currTime + "_a" + i1;// final result dataset
                map2.put("layerName", resultLayerName);
                map2.put("mapsName", "newmark模板-图例.xml");
                map2.put("countString", countString);
                RestResponse serverToken1 = hypergraphService.getServerToken(map2);
                if (serverToken1.getCode() != 200) {
                    return serverToken1;
                }
                jsonArray2.add(resultLayerName);
            }

        }
        jsonObject.put("resultLayerName", jsonArray2);
        // Cleanup: batch-delete all intermediate "_*" datasets for this run (best effort;
        // the response is intentionally ignored).
        Map<String, Object> map3 = new HashMap<>();
        map3.put("type", "batchDelTif");
        String resultLayerName = "NEWMARK模型_过程计算_" + currTime + "_*";// intermediate dataset name pattern
        map3.put("layerName", resultLayerName);
        hypergraphService.getServerToken(map3);
        return RestResponse.succeed(jsonObject);
    }

    /**
     * Prepares the slope raster for a model run and dispatches to the per-model
     * calculation (count1Detail..count5Detail) selected by {@code tab}.
     * <p>
     * Two preparation paths: for tab "1" without factor "1", the task-area vector
     * dataset is rasterized; otherwise the versioned slope TIFF attachment is
     * clipped by {@code geom}.
     *
     * @param geom    WKT/geometry text used to clip the slope raster
     * @param version slope-data version code (resolved via sysDataVersionRepository)
     * @param taskId  task id (forwarded to the detail calculations via map1 by callers)
     * @param modelId model id (unused here; kept for interface compatibility)
     * @param tab     model type selector: "1".."5"
     * @param map1    parameter bag shared with the detail calculations ("currTime",
     *                "factor1", "clipLayerName", ...)
     * @return the detail calculation's response, or a failure response
     */
    public RestResponse getDataBySlopeVersion1(String geom, String version, String taskId, String modelId, String tab
            , Map<String, Object> map1) {
        String resultLayerName1 = "";
        String currTime = (String) map1.get("currTime");
        if ("1".equals(tab) && !((String) map1.get("factor1")).contains("1")) {
            String clipLayerName = (String) map1.get("clipLayerName");
            // Rasterize the task-area vector dataset.
            String resultLayerName = "temp_dzzh_task_栅格_" + currTime;// result raster dataset
            Map<String, Object> map = new HashMap<>();
            map.put("type", "dzzhCountVectorTif");
            map.put("layerName", "dzzh_task");// vector dataset to rasterize
            map.put("clipLayerName", clipLayerName);// boundary dataset
            map.put("fieldName", "SmUserID");// field supplying raster values; SuperMap's own SmUserID defaults to 0
            map.put("resultLayerName", resultLayerName);// result raster dataset
            RestResponse serverToken = hypergraphService.getServerToken(map);
            // Fix: the original parsed getData() unconditionally and threw an NPE when
            // the hypergraph call failed; propagate the failure instead.
            if (serverToken.getCode() != 200) {
                return serverToken;
            }
            resultLayerName1 = JSON.parseObject(JSON.toJSONString(serverToken.getData()))
                    .getString("resultLayerName");
        } else {
            SysDataVersion sysDataVersion = sysDataVersionRepository.getByVersionCode(version);
            AttachmentInfoEntity attach = attachmentInfoService.getAttach(sysDataVersion.getDataFileIds());
            // NOTE(review): this appends to the shared fileUrl field on every call, and the
            // value is never read again in this method — confirm whether any downstream
            // code depends on the mutated field before removing the side effect.
            if (PlatformObjectUtils.isEmpty(attach.getFileDirId())) {
                fileUrl = fileUrl + attach.getAttachPath();
            } else {
                fileUrl = attach.getFileDirId() + attach.getAttachPath();
            }
            Map<String, Object> map = new HashMap<>();
            // Probe whether this version has data (GET fetching the raster's min/max).
            RestResponse code = drillService.getDataByDzzhTifFile(stripTifSuffix(attach.getAttachName()));
            if (code.getCode() != 200) {
                return RestResponse.fail(500, "坡度数据" + version + "版本不存在，请重新选择");
            }
            // A non-200 code means the data extents do not intersect.
            map.put("type", "clipTif2");// clip the raster only
            String layerName = stripTifSuffix(attach.getAttachName());
            map.put("layerName", layerName);
            resultLayerName1 = "temp_裁剪_" + currTime;// clipped result dataset
            map.put("resultLayerName", resultLayerName1);
            map.put("geomText", geom);
            map.put("sourceType", "dzzhTifFile");
            RestResponse serverToken = hypergraphService.getServerToken(map);
            if (serverToken.getCode() != 200) {
                return RestResponse.fail(401, "文件获取失败！");
            }
            if ("1".equals(tab)) {
                JSONObject jsonObject1 = new JSONObject();
                jsonObject1.put("resultLayerName1", resultLayerName1);
                map1.put("jsonObject1", jsonObject1);
            }
        }
        if ("1".equals(tab)) {
            return count1Detail(resultLayerName1, map1, currTime);
        } else if ("2".equals(tab)) {
            return RestResponse.succeed(count2Detail(resultLayerName1, currTime));
        } else if ("3".equals(tab)) {
            return RestResponse.succeed(count3Detail(resultLayerName1, currTime));
        } else if ("4".equals(tab)) {
            return count4Detail(resultLayerName1, map1, currTime);
        } else if ("5".equals(tab)) {
            return count5Detail(resultLayerName1, map1, currTime);
        }

        return RestResponse.succeed("失败！！！");
    }

    /**
     * Strips everything from the first literal ".tif" onwards (returns the name
     * unchanged when no ".tif" occurs).
     * <p>
     * Fix: the original used {@code String.split(".tif")}, where "." is a regex
     * wildcard, so a name such as "mytifdata" would be split at "ytif" by accident;
     * {@code indexOf} matches the literal suffix only.
     */
    private static String stripTifSuffix(String name) {
        int idx = name.indexOf(".tif");
        return idx >= 0 ? name.substring(0, idx) : name;
    }

    /**
     * @param type          vectorTif-1//矢量转栅格的数据集 vectorTif-2//与基础数据的矢量转栅格的数据集
     * @param layerName     矢量转栅格的数据集
     * @param clipLayerName 边界数据集
     * @param fieldName     用于生成栅格值的矢量数据的字段，默认用超图自身的字段SmUserID值为0
     * @param sqlFilter     过滤查询数据的表达式，即只保留不符合sql条件的数据
     * @param sqlFilterClip 边界过滤查询数据的表达式，即只保留符合sql条件的数据
     * @param currTime      时间戳
     * @return
     */
    /**
     * Rasterizes a vector dataset through the hypergraph service.
     *
     * @param type          vectorTif-1: rasterize the dataset itself; vectorTif-2: rasterize joined with base data
     * @param layerName     vector dataset to rasterize
     * @param clipLayerName boundary dataset
     * @param fieldName     vector field supplying raster values; SuperMap's own SmUserID field defaults to 0
     * @param sqlFilter     filter expression — rows matching it are EXCLUDED
     * @param sqlFilterClip boundary filter expression — rows matching it are KEPT
     * @param currTime      timestamp used to namespace the run
     * @return the hypergraph service response
     */
    private RestResponse vectorTif(String type, String layerName, String clipLayerName, String fieldName,
                                   String sqlFilter, String sqlFilterClip, String currTime) {
        Map<String, Object> request = new HashMap<>();
        request.put("currTime", currTime);
        request.put("type", type);
        // Source vector dataset and its boundary.
        request.put("layerName", layerName);
        request.put("clipLayerName", clipLayerName);
        // Field whose values populate the output raster cells.
        request.put("fieldName", fieldName);
        // Exclusion filter for the source, inclusion filter for the boundary.
        request.put("sqlFilter", sqlFilter);
        request.put("sqlFilterClip", sqlFilterClip);
        return hypergraphService.getServerToken(request);
    }

    /**
     * @param type            countTif:栅格计算发布;countTif2:栅格计算;
     * @param countString
     * @param resultLayerName
     * @return
     */
    /**
     * Executes one raster-algebra operation via the hypergraph service.
     *
     * @param type            countTif: raster calculation with publishing; countTif2: raster calculation only
     * @param countString     raster-algebra expression to evaluate
     * @param resultLayerName dataset name receiving the computed raster
     * @return the hypergraph service response
     */
    private RestResponse countTif(String type, String countString, String resultLayerName) {
        Map<String, Object> request = new HashMap<>();
        request.put("countString", countString);
        request.put("layerName", resultLayerName);
        request.put("type", type);
        return hypergraphService.getServerToken(request);
    }

    /**
     * Copies a raster dataset via the hypergraph service.
     *
     * @param type            hypergraph operation type
     * @param paramString     name of the source dataset, sent as "layerName"
     * @param resultLayerName NOTE(review): never used — the request map carries no target
     *                        dataset name, so the service presumably derives the copy's name
     *                        itself; confirm whether this parameter should be put in the map
     *                        or removed from the signature.
     * @return the hypergraph service response
     */
    private RestResponse coypTif1(String type, String paramString, String resultLayerName) {
        // Copy a raster dataset.
        Map<String, Object> map = new HashMap<>();
        map.put("type", type);
        map.put("layerName", paramString);
        return hypergraphService.getServerToken(map);
    }

    /**
     * Resolves the TIFF attachment for a slope-data version and returns slope or
     * aspect statistics for the given geometry.
     *
     * @param geom    geometry text the TIFF is evaluated against
     * @param version slope-data version code
     * @return slope data when dataTypeLevel2 is "dxdmpd", aspect data when "dxdmpx",
     *         otherwise {@code null}
     */
    @Override
    public Map<String, List<Map<String, String>>> getDataBySlopeVersion(String geom, String version) {
        SysDataVersion sysDataVersion = sysDataVersionRepository.getByVersionCode(version);
        AttachmentInfoEntity attach = attachmentInfoService.getAttach(sysDataVersion.getDataFileIds());
        // Fix: the original assigned back into the shared fileUrl field
        // (fileUrl = fileUrl + path), so every call without a FileDirId kept
        // appending another attachment path onto the instance state. Build the
        // location in a local instead and leave the field untouched.
        String tifPath;
        if (PlatformObjectUtils.isEmpty(attach.getFileDirId())) {
            tifPath = fileUrl + attach.getAttachPath();
        } else {
            tifPath = attach.getFileDirId() + attach.getAttachPath();
        }
        // Constant-first equals avoids an NPE when dataTypeLevel2 is null.
        String dataType = sysDataVersion.getDataTypeLevel2();
        if ("dxdmpd".equals(dataType)) {
            // Terrain slope.
            return elevationMapService.getSlopeByGeomAndTIF1(geom, tifPath);
        } else if ("dxdmpx".equals(dataType)) {
            // Terrain aspect.
            return elevationMapService.getAspectByGeomAndTIF1(geom, tifPath);
        }
        return null;
    }

    /**
     * Persists a batch of NEWMARK lithology-group result rows; thin delegation to the repository.
     *
     * @param lgList rows to save
     */
    private void saveDzzhResultNewmarkLithologyGroup(List<DzzhResultNewmarkLithologyGroup> lgList) {
        eqLandslideRepository.saveDzzhResultNewmarkLithologyGroup(lgList);
    }

    /**
     * Persists a NEWMARK model entity under a freshly generated UUID.
     *
     * @param entity model parameters to store; its id is overwritten here
     * @return the generated id
     */
    private String saveDzzhModelNewmark(DzzhModelNewmark entity) {
        String generatedId = UUIDGenerator.getUUID();
        entity.setId(generatedId);
        eqLandslideRepository.saveDzzhModelNewmark(entity);
        return generatedId;
    }

    /**
     * Creates and persists a task/model association record for the current user,
     * marked as the adopted run (evaluate/result status "2").
     *
     * @param taskId    task the model run belongs to
     * @param dtmId     task-model id linking to the model's parameter row
     * @param modelType model type code ("1".."5")
     * @param attachId  attachment id associated with the run
     * @param lithology lithology descriptor for the run
     */
    private void saveDzzhTaskModel(String taskId, String dtmId, String modelType,String attachId,String lithology) {
        String currentUserId = PlatformSessionContext.getUserID();
        // Resolve the operator's display name from the user service (JSON payload).
        SUser operator = CreateGson.createGson().fromJson(sUserService.getSUser(currentUserId), SUser.class);

        DzzhTaskModel record = new DzzhTaskModel();
        record.setTaskId(taskId);
        record.setDtmId(dtmId);
        record.setModelType(modelType);
        record.setAttachId(attachId);
        record.setLithology(lithology);
        // "2" marks both the evaluation and its result as the currently adopted run.
        record.setEvaluateStatus("2");
        record.setResultStatus("2");
        record.setEvaluateUser(currentUserId);
        record.setEvaluateUserName(operator.getUserName());
        record.setEvaluateTime(PlatformDateUtils.getCurrentTimestamp());
        record.setDelFlag("0");
        record.setCreateUser(currentUserId);
        record.setCreateTime(PlatformDateUtils.getCurrentTimestamp());
        eqLandslideRepository.saveDzzhTaskModel(record);
    }

    /**
     * Deletes the task/model association identified by {@code dtmId} and unlinks
     * (or removes) the corresponding model/result rows. No-op when the record
     * does not exist.
     *
     * @param dtmId task-model id to delete
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public void delDzzhTaskModel(String dtmId) {
        DzzhTaskModel existing = eqLandslideRepository.getDzzhTaskModelByDtmId(dtmId);
        if (existing == null) {
            return;
        }
        delOldAndSaveNew(existing, existing.getTaskId());
    }

    /**
     * Re-links a model run: when {@code dtmId} matches the currently adopted run,
     * the run is cloned under a fresh dtmId with status reset to "1"/"1" and the
     * model row's comma-separated dtmId list is extended with the new id. For
     * tab "1" the original dtmId's association row is then soft-deleted.
     *
     * @param dtmId  task-model id (commas stripped before comparison); may be null/blank
     * @param taskId task whose adopted run is inspected
     * @param tab    when "1", additionally soft-delete the dtmId association
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public void updateDzzhTaskModel1(String dtmId, String taskId, String tab) {
        // Currently adopted ("2") run for this task, if any.
        DzzhTaskModel dzzhTaskModel = eqLandslideRepository.getDzzhTaskModelByTaskIdAndResultStatus(taskId, "2");
        if (StringUtils.isNotBlank(dtmId)) {
            dtmId = dtmId.replace(",", "");
        }

        // Fix: the original evaluated dtmId.equals(...) without a null check and
        // threw an NPE when dtmId was null while an adopted run existed.
        if (dzzhTaskModel != null && dtmId != null && dtmId.equals(dzzhTaskModel.getDtmId())) {
            // Clone the adopted run under a fresh id, demoted to status "1"/"1".
            dzzhTaskModel.setDtmId(UUIDGenerator.getUUID());
            dzzhTaskModel.setEvaluateStatus("1");
            dzzhTaskModel.setResultStatus("1");
            String userId = PlatformSessionUtils.getUserId();
            dzzhTaskModel.setCreateUser(userId);
            dzzhTaskModel.setCreateTime(PlatformDateUtils.getCurrentTimestamp());
            eqLandslideRepository.saveDzzhTaskModel(dzzhTaskModel);
            // Append the new dtmId to the model row's comma-separated dtmId list,
            // using the model-type-specific repository family.
            switch (dzzhTaskModel.getModelType()) {
                case "1":
                    DzzhModelCcfx modelDataByTaskId1 = eqLandslideRepository.getModelDataByTaskId1(taskId, dtmId);
                    String oldDtmId1 = modelDataByTaskId1.getDtmId();
                    String newDtmId1 = oldDtmId1 + dzzhTaskModel.getDtmId() + ",";
                    eqLandslideRepository.updateDtmIdByTaskId1(newDtmId1, taskId, oldDtmId1);
                    break;
                case "2":
                    DzzhModelLoessPlateau modelDataByTaskId2 = eqLandslideRepository.getModelDataByTaskId2(taskId, dtmId);
                    String oldDtmId2 = modelDataByTaskId2.getDtmId();
                    String newDtmId2 = oldDtmId2 + dzzhTaskModel.getDtmId() + ",";
                    eqLandslideRepository.updateDtmIdByTaskId2(newDtmId2, taskId, oldDtmId2);
                    break;
                case "3":
                    DzzhModelExponential modelDataByTaskId3 = eqLandslideRepository.getModelDataByTaskId3(taskId, dtmId);
                    String oldDtmId3 = modelDataByTaskId3.getDtmId();
                    String newDtmId3 = oldDtmId3 + dzzhTaskModel.getDtmId() + ",";
                    eqLandslideRepository.updateDtmIdByTaskId3(newDtmId3, taskId, oldDtmId3);
                    break;
                case "4":
                    DzzhModelLogisticGrowth modelDataByTaskId4 = eqLandslideRepository.getModelDataByTaskId4(taskId, dtmId);
                    String oldDtmId4 = modelDataByTaskId4.getDtmId();
                    String newDtmId4 = oldDtmId4 + dzzhTaskModel.getDtmId() + ",";
                    eqLandslideRepository.updateDtmIdByTaskId4(newDtmId4, taskId, oldDtmId4);
                    break;
                case "5":
                    DzzhModelNewmark modelDataByTaskId5 = eqLandslideRepository.getModelDataByTaskId5(taskId, dtmId);
                    String oldDtmId5 = modelDataByTaskId5.getDtmId();
                    String newDtmId5 = oldDtmId5 + dzzhTaskModel.getDtmId() + ",";
                    eqLandslideRepository.updateDtmIdByTaskId5(newDtmId5, taskId, oldDtmId5);
                    break;
                default:
                    // Unknown model type: nothing to re-link.
                    break;
            }
        }
        if ("1".equals(tab)) {
            if (StringUtils.isNotBlank(dtmId)) {
                // Soft-delete the original association row for tab "1".
                String userId = PlatformSessionUtils.getUserId();
                Timestamp currentTimestamp = PlatformDateUtils.getCurrentTimestamp();
                eqLandslideRepository.delDzzhTaskModel(dtmId, userId, currentTimestamp);
            }
        }
    }

    /**
     * Promotes the run identified by {@code dtmId} to the adopted state ("2"/"2").
     * When {@code flag} is "1" the previously adopted run is demoted to "1"/"1"
     * (kept); otherwise it is removed via {@link #delOldAndSaveNew}.
     *
     * @param dtmId  task-model id of the run to adopt
     * @param flag   "1" = demote (keep) the old run; anything else = delete it
     * @param taskId task whose adopted run is being switched
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public void updateDzzhTaskModel2(String dtmId, String flag, String taskId) {
        DzzhTaskModel dzzhTaskModel = eqLandslideRepository.getDzzhTaskModelByTaskIdAndResultStatus(taskId, "2");
        String userId = PlatformSessionUtils.getUserId();
        Timestamp currentTimestamp = PlatformDateUtils.getCurrentTimestamp();
        if ("1".equals(flag)) {
            // Demote the previously adopted run. Fix: the original dereferenced
            // dzzhTaskModel unconditionally and threw an NPE when no run with
            // resultStatus "2" existed for the task.
            if (dzzhTaskModel != null) {
                eqLandslideRepository.updateDzzhTaskModel(dzzhTaskModel.getDtmId(), "1", "1", userId, currentTimestamp);
            }
        } else {
            // delOldAndSaveNew already tolerates a null dzzhTaskModel.
            delOldAndSaveNew(dzzhTaskModel, taskId);
        }
        // Adopt the requested run.
        eqLandslideRepository.updateDzzhTaskModel(dtmId, "2", "2", userId, currentTimestamp);
    }

    /**
     * Retires an adopted task/model run: soft-deletes its association row, then
     * removes the run's dtmId from the model row's comma-separated dtmId list.
     * When the list becomes empty the model row and its result rows are deleted
     * outright. Safe no-op when {@code dzzhTaskModel} is null/empty.
     *
     * @param dzzhTaskModel the association record being retired (may be null)
     * @param taskId        task the record belongs to
     */
    private void delOldAndSaveNew(DzzhTaskModel dzzhTaskModel, String taskId) {
        if (PlatformObjectUtils.isNotEmpty(dzzhTaskModel)) {
            String userId = PlatformSessionUtils.getUserId();
            Timestamp currentTimestamp = PlatformDateUtils.getCurrentTimestamp();
            // Soft-delete the association row first.
            eqLandslideRepository.delDzzhTaskModel(dzzhTaskModel.getDtmId(), userId, currentTimestamp);
            // Each model type has its own model/result repository family; the body of
            // every case is structurally identical: strip "dtmId," from the list,
            // delete model+result rows when the list empties, otherwise persist it.
            switch (dzzhTaskModel.getModelType()) {
                case "1":
                    DzzhModelCcfx modelDataByTaskId1 = eqLandslideRepository.getModelDataByTaskId1(taskId,
                            dzzhTaskModel.getDtmId());
                    String oldDtmId1 = modelDataByTaskId1.getDtmId();
                    String newDtmId1 = oldDtmId1.replace(dzzhTaskModel.getDtmId() + ",", "");
                    if (PlatformObjectUtils.isEmpty(newDtmId1)) {
                        eqLandslideRepository.delDzzhModelCcfx(modelDataByTaskId1.getId());
                        eqLandslideRepository.delDzzhResultCcfx(modelDataByTaskId1.getId());
                    } else {
                        eqLandslideRepository.updateDtmIdByTaskId1(newDtmId1, taskId, oldDtmId1);
                    }
                    break;
                case "2":
                    DzzhModelLoessPlateau modelDataByTaskId2 = eqLandslideRepository.getModelDataByTaskId2(taskId,
                            dzzhTaskModel.getDtmId());
                    String oldDtmId2 = modelDataByTaskId2.getDtmId();
                    String newDtmId2 = oldDtmId2.replace(dzzhTaskModel.getDtmId() + ",", "");
                    if (PlatformObjectUtils.isEmpty(newDtmId2)) {
                        eqLandslideRepository.delDzzhModelLoessPlateau(modelDataByTaskId2.getId());
                        eqLandslideRepository.delDzzhResultLoessPlateau(modelDataByTaskId2.getId());
                    } else {
                        eqLandslideRepository.updateDtmIdByTaskId2(newDtmId2, taskId, oldDtmId2);
                    }
                    break;
                case "3":
                    DzzhModelExponential modelDataByTaskId3 = eqLandslideRepository.getModelDataByTaskId3(taskId, dzzhTaskModel.getDtmId());
                    String oldDtmId3 = modelDataByTaskId3.getDtmId();
                    String newDtmId3 = oldDtmId3.replace(dzzhTaskModel.getDtmId() + ",", "");
                    if (PlatformObjectUtils.isEmpty(newDtmId3)) {
                        eqLandslideRepository.delDzzhModelExponential(modelDataByTaskId3.getId());
                        eqLandslideRepository.delDzzhResultExponential(modelDataByTaskId3.getId());
                    } else {
                        eqLandslideRepository.updateDtmIdByTaskId3(newDtmId3, taskId, oldDtmId3);
                    }
                    break;
                case "4":
                    DzzhModelLogisticGrowth modelDataByTaskId4 = eqLandslideRepository.getModelDataByTaskId4(taskId, dzzhTaskModel.getDtmId());
                    String oldDtmId4 = modelDataByTaskId4.getDtmId();
                    String newDtmId4 = oldDtmId4.replace(dzzhTaskModel.getDtmId() + ",", "");
                    if (PlatformObjectUtils.isEmpty(newDtmId4)) {
                        eqLandslideRepository.delDzzhModelLogisticGrowth(modelDataByTaskId4.getId());
                        eqLandslideRepository.delDzzhResultLogisticGrowth(modelDataByTaskId4.getId());
                    } else {
                        eqLandslideRepository.updateDtmIdByTaskId4(newDtmId4, taskId, oldDtmId4);
                    }
                    // NOTE(review): only model type "4" also clears the count-area results —
                    // confirm whether the other types should do the same.
                    deleteDzzhCountAreaResultByTaskId(taskId, null, modelDataByTaskId4.getId());
                    break;
                case "5":
                    DzzhModelNewmark modelDataByTaskId5 = eqLandslideRepository.getModelDataByTaskId5(taskId, dzzhTaskModel.getDtmId());
                    String oldDtmId5 = modelDataByTaskId5.getDtmId();
                    String newDtmId5 = oldDtmId5.replace(dzzhTaskModel.getDtmId() + ",", "");
                    if (PlatformObjectUtils.isEmpty(newDtmId5)) {
                        eqLandslideRepository.delDzzhModelNewmark(modelDataByTaskId5.getId());
                        eqLandslideRepository.delDzzhResultNewmark(modelDataByTaskId5.getId());
                    } else {
                        eqLandslideRepository.updateDtmIdByTaskId5(newDtmId5, taskId, oldDtmId5);
                    }
                    break;
            }
        }
    }

    /**
     * Returns the task/model association rows for the given task, wrapped in a
     * success response; thin delegation to the repository.
     *
     * @param taskId task to look up
     * @return success response carrying the repository result
     */
    @Override
    public RestResponse getDzzhTaskModel(String taskId) {
        return RestResponse.succeed(eqLandslideRepository.getDzzhTaskModel(taskId));
    }

    /**
     * Returns a page of task/model view objects matching {@code param}; thin
     * delegation to the repository.
     *
     * @param param    filter criteria
     * @param curPage  1-based page index (assumed — TODO confirm against repository)
     * @param pageSize rows per page
     * @return success response carrying the page
     */
    @Override
    public RestResponse getTaskModelVOPage(TaskModelParam param, int curPage, int pageSize) {
        return RestResponse.succeed(eqLandslideRepository.getTaskModelVOPage(param, curPage, pageSize));
    }

    @Override
    public RestResponse getTaskModelDetail(String taskId, String modelType) {
        JSONObject jsonObject = new JSONObject();
        TaskEntity taskEntity = taskService.viewTask(taskId);
        String status = "1";
        switch (modelType) {
            case "1":
                TaskModelCcfxVO taskModelDetail1 = eqLandslideRepository.getTaskModelDetail1(taskId);
                if(taskModelDetail1 == null){
                    String userId = PlatformSessionContext.getUserID();
                    String user = sUserService.getSUser(userId);
                    SUser sUser = CreateGson.createGson().fromJson(user, SUser.class);
                    taskModelDetail1 = new TaskModelCcfxVO();
                    taskModelDetail1.setTaskId(taskEntity.getDtId());
                    taskModelDetail1.setTaskNum(taskEntity.getTaskNum());
                    taskModelDetail1.setTaskName(taskEntity.getTaskName());
                    taskModelDetail1.setTaskDistrict(taskEntity.getTaskDistrict());
                    taskModelDetail1.setDisasterType(taskEntity.getDisasterType());
                    taskModelDetail1.setEvaluateUserName(sUser.getUserName());
                    status = "0";
                }
                jsonObject = JSON.parseObject(JSON.toJSONString(taskModelDetail1));
                jsonObject.put("status", status);
                break;
            case "2":
                TaskModelLoessPlateauVO taskModelDetail2 = eqLandslideRepository.getTaskModelDetail2(taskId);
                if(taskModelDetail2 == null){
                    String userId = PlatformSessionContext.getUserID();
                    String user = sUserService.getSUser(userId);
                    SUser sUser = CreateGson.createGson().fromJson(user, SUser.class);
                    taskModelDetail2 = new TaskModelLoessPlateauVO();
                    taskModelDetail2.setTaskId(taskEntity.getDtId());
                    taskModelDetail2.setTaskNum(taskEntity.getTaskNum());
                    taskModelDetail2.setTaskName(taskEntity.getTaskName());
                    taskModelDetail2.setTaskDistrict(taskEntity.getTaskDistrict());
                    taskModelDetail2.setDisasterType(taskEntity.getDisasterType());
                    taskModelDetail2.setEvaluateUserName(sUser.getUserName());
                    status = "0";
                }
                jsonObject = JSON.parseObject(JSON.toJSONString(taskModelDetail2));
                jsonObject.put("status", status);
                break;
            case "3":
                TaskModelExponentialVO taskModelDetail3 = eqLandslideRepository.getTaskModelDetail3(taskId);
                if(taskModelDetail3 == null){
                    String userId = PlatformSessionContext.getUserID();
                    String user = sUserService.getSUser(userId);
                    SUser sUser = CreateGson.createGson().fromJson(user, SUser.class);
                    taskModelDetail3 = new TaskModelExponentialVO();
                    taskModelDetail3.setTaskId(taskEntity.getDtId());
                    taskModelDetail3.setTaskNum(taskEntity.getTaskNum());
                    taskModelDetail3.setTaskName(taskEntity.getTaskName());
                    taskModelDetail3.setTaskDistrict(taskEntity.getTaskDistrict());
                    taskModelDetail3.setDisasterType(taskEntity.getDisasterType());
                    taskModelDetail3.setEvaluateUserName(sUser.getUserName());
                    status = "0";
                }
                jsonObject = JSON.parseObject(JSON.toJSONString(taskModelDetail3));
                jsonObject.put("status", status);
                break;
            case "4":
                TaskModelLogiGrowthVO taskModelDetail4 = eqLandslideRepository.getTaskModelDetail4(taskId);
                if(taskModelDetail4 == null){
                    String userId = PlatformSessionContext.getUserID();
                    String user = sUserService.getSUser(userId);
                    SUser sUser = CreateGson.createGson().fromJson(user, SUser.class);
                    taskModelDetail4 = new TaskModelLogiGrowthVO();
                    taskModelDetail4.setTaskId(taskEntity.getDtId());
                    taskModelDetail4.setTaskNum(taskEntity.getTaskNum());
                    taskModelDetail4.setTaskName(taskEntity.getTaskName());
                    taskModelDetail4.setTaskDistrict(taskEntity.getTaskDistrict());
                    taskModelDetail4.setDisasterType(taskEntity.getDisasterType());
                    taskModelDetail4.setEvaluateUserName(sUser.getUserName());
                    status = "0";
                }
                jsonObject = JSON.parseObject(JSON.toJSONString(taskModelDetail4));
                jsonObject.put("status", status);
                break;
            case "5":
                String[] lithologyGroups = new String[]{"坚硬岩类", "较硬岩类", "较软岩类", "软岩类", "极软岩类"};
                TaskModelNewMarkVO taskModelNewMarkVO = eqLandslideRepository.getTaskModelDetail5(taskId);
                if(taskModelNewMarkVO == null){
                    String userId = PlatformSessionContext.getUserID();
                    String user = sUserService.getSUser(userId);
                    SUser sUser = CreateGson.createGson().fromJson(user, SUser.class);
                    taskModelNewMarkVO = new TaskModelNewMarkVO();
                    taskModelNewMarkVO.setTaskId(taskEntity.getDtId());
                    taskModelNewMarkVO.setTaskNum(taskEntity.getTaskNum());
                    taskModelNewMarkVO.setTaskName(taskEntity.getTaskName());
                    taskModelNewMarkVO.setTaskDistrict(taskEntity.getTaskDistrict());
                    taskModelNewMarkVO.setDisasterType(taskEntity.getDisasterType());
                    taskModelNewMarkVO.setEvaluateUserName(sUser.getUserName());
                    jsonObject = JSON.parseObject(JSON.toJSONString(taskModelNewMarkVO));
                    status = "0";
                    jsonObject.put("status",status);
                }else{
                    jsonObject = JSON.parseObject(JSON.toJSONString(taskModelNewMarkVO));
                    List<LithologyGroup> groupList = eqLandslideRepository.getLithologyGroup(taskModelNewMarkVO.getDtmId());
                    if (groupList.size() < 1) {
                        //地层岩性归并
                        List<String> lithologyGroupNames = eqLandslideRepository.getLithologyGroupName(taskId);
                        groupList = new ArrayList<>();
                        for (String name : lithologyGroupNames) {
                            LithologyGroup lithologyGroup = new LithologyGroup();
                            lithologyGroup.setGroupName(name);
                            groupList.add(lithologyGroup);
                        }
                        jsonObject.put("status", "1");
                    } else {
                        //评估计算
                        jsonObject.put("status", "0");
                    }
                    List<LithologyGroup> groupList1 = new ArrayList<>();
                    for (String lithology : lithologyGroups) {
                        for (LithologyGroup lithologyGroup : groupList) {
                            if (lithology.equals(lithologyGroup.getGroupName())) {
                                groupList1.add(lithologyGroup);
                                break;
                            }
                        }
                    }
                    jsonObject.put("groupList", groupList1);
                }
               // jsonObject = JSON.parseObject(JSON.toJSONString(taskModelNewMarkVO));

                break;
            default:

                jsonObject = JSON.parseObject(JSON.toJSONString(taskEntity));
                jsonObject.put("status", "2");
                jsonObject.put("evaluateUserName", taskEntity.getCreateUserName());
                break;
        }
        return RestResponse.succeed(jsonObject);
    }

    /**
     * Builds the model-detail payload for a specific model run (dtmId) of a task,
     * dispatching on the model-type code: "1" AHP, "2" Loess Plateau,
     * "3" exponential, "4" logistic growth, "5" Newmark; any other code falls
     * back to the raw task entity.
     *
     * The "status" field put into the returned JSON drives the front-end step:
     * "1" = detail available / lithology still needs merging (model 5),
     * "0" = lithology groups already merged (model 5, ready for evaluation),
     * "2" = no model detail (default branch).
     *
     * NOTE(review): unlike the taskId-only variant of this method, the repository
     * results here are serialized without a null check — presumably dtmId always
     * refers to an existing run; confirm, otherwise JSON.parseObject("null")
     * yields a null jsonObject and the subsequent put(...) throws an NPE.
     */
    @Override
    public RestResponse getTaskModelDetailMil(String taskId, String dtmId, String modelType) {
        JSONObject jsonObject = new JSONObject();
        switch (modelType) {
            case "1":
                // AHP (analytic hierarchy process) model detail
                TaskModelCcfxVO taskModelDetail1 = eqLandslideRepository.getTaskModelDetail1Mil(taskId, dtmId);
                jsonObject = JSON.parseObject(JSON.toJSONString(taskModelDetail1));
                jsonObject.put("status", "1");
                break;
            case "2":
                // Loess Plateau model detail
                TaskModelLoessPlateauVO taskModelDetail2 = eqLandslideRepository.getTaskModelDetail2Mil(taskId, dtmId);
                jsonObject = JSON.parseObject(JSON.toJSONString(taskModelDetail2));
                jsonObject.put("status", "1");
                break;
            case "3":
                // Exponential model detail
                TaskModelExponentialVO taskModelDetail3 = eqLandslideRepository.getTaskModelDetail3Mil(taskId, dtmId);
                jsonObject = JSON.parseObject(JSON.toJSONString(taskModelDetail3));
                jsonObject.put("status", "1");
                break;
            case "4":
                // Logistic-growth model detail
                TaskModelLogiGrowthVO taskModelDetail4 = eqLandslideRepository.getTaskModelDetail4Mil(taskId, dtmId);
                jsonObject = JSON.parseObject(JSON.toJSONString(taskModelDetail4));
                jsonObject.put("status", "1");
                break;
            case "5":
                // Newmark model: also resolve the lithology hardness groups,
                // ordered hard -> extremely soft.
                String[] lithologyGroups = new String[]{"坚硬岩类", "较硬岩类", "较软岩类", "软岩类", "极软岩类"};
                TaskModelNewMarkVO taskModelNewMarkVO = eqLandslideRepository.getTaskModelDetail5Mil(taskId, dtmId);
                jsonObject = JSON.parseObject(JSON.toJSONString(taskModelNewMarkVO));
                List<LithologyGroup> groupList = eqLandslideRepository.getLithologyGroup(taskModelNewMarkVO.getDtmId());
                if (groupList.size() < 1) {
                    // Stratum lithology not merged yet: expose candidate group
                    // names so the UI can run the merge step (status "1").
                    List<String> lithologyGroupNames = eqLandslideRepository.getLithologyGroupName(taskId);
                    groupList = new ArrayList<>();
                    for (String name : lithologyGroupNames) {
                        LithologyGroup lithologyGroup = new LithologyGroup();
                        lithologyGroup.setGroupName(name);
                        groupList.add(lithologyGroup);
                    }
                    jsonObject.put("status", "1");
                } else {
                    // Groups already merged: evaluation can proceed (status "0").
                    jsonObject.put("status", "0");
                }
                // Re-emit the groups in the canonical hardness order above.
                List<LithologyGroup> groupList1 = new ArrayList<>();
                for (String lithology : lithologyGroups) {
                    for (LithologyGroup lithologyGroup : groupList) {
                        if (lithology.equals(lithologyGroup.getGroupName())) {
                            groupList1.add(lithologyGroup);
                            break;
                        }
                    }
                }
                jsonObject.put("groupList", groupList1);
                break;
            default:
                // Unknown model type: return the bare task info with status "2".
                TaskEntity taskEntity = taskService.viewTask(taskId);
                jsonObject = JSON.parseObject(JSON.toJSONString(taskEntity));
                jsonObject.put("status", "2");
                jsonObject.put("evaluateUserName", taskEntity.getCreateUserName());
                break;
        }
        return RestResponse.succeed(jsonObject);
    }

    /**
     * Fetches stratum-lithology records from the basic-data center for the
     * given lithology version code and type code; returns an empty list when
     * the remote payload carries no data.
     */
    @SuppressWarnings("unchecked")
    private List<JSONObject> getJcStratumLithologyList(String lithologtCode, String typeCode) {
        String url = String.format(
                "%s/datacenter/sysDataVersion/queryAllByVersionCode?versionCode=%s&typeCode=%s",
                basicDataUrl, lithologtCode, typeCode);
        JSONObject payload = (JSONObject) JSON.toJSON(drillService.getJCSJ(url, 2).getData());
        List<JSONObject> records = (List<JSONObject>) payload.get("data");
        if (PlatformObjectUtils.isEmpty(records)) {
            return new ArrayList<>();
        }
        return records;
    }

    /**
     * Merges the basic-data stratum-lithology layer of a task into the five
     * hardness groups used by the Newmark / AHP workflows, persists the merge
     * results, and records a fresh model run (dzzh_task_model).
     *
     * Expected param keys: taskId, lithologtCode (地层岩性版本, lithology version
     * code), typeCode (地层岩性标识, lithology data identifier), modelType.
     *
     * @return RestResponse whose payload maps "lithologyGroup" to a
     *         comma-separated list of the hardness groups found (may be "").
     */
    @Override
    public RestResponse queryAllLithologyByVersionCode(Map<String, Object> param) {
        String taskId = (String) param.get("taskId");
        String lithologtCode = (String) param.get("lithologtCode");
        String typeCode = (String) param.get("typeCode");
        String modelType = (String) param.get("modelType");
        String userId = PlatformSessionUtils.getUserId();
        String user = sUserService.getSUser(userId);
        SUser sUser = CreateGson.createGson().fromJson(user, SUser.class);
        Map<String, Object> map = new HashMap<>();
        List<JSONObject> list = getJcStratumLithologyList(lithologtCode, typeCode);
        Set<String> set = new HashSet<>();
        if (PlatformObjectUtils.isNotEmpty(list)) {
            // Remove any previous FINA-state merge results for this task/version first.
            eqLandslideRepository.delete(taskId, lithologtCode, ResultEnum.FINA.getState() + "");
            List<LithologyMergeResult> results = new ArrayList<>(list.size());
            Map<String, String> lithologyMap = lithologyService.getLithologyMap();
            for (JSONObject jsonObject : list) {
                String lithologyType = lithologyMap.get(jsonObject.getString("mainCode"));
                int values1 = -1; // hardness score; -1 = unknown group
                if (PlatformObjectUtils.isNotEmpty(lithologyType)) {
                    switch (lithologyType) {
                        case "极软岩类":
                            values1 = 10;
                            break;
                        case "软岩类":
                            values1 = 7;
                            break;
                        case "较软岩类":
                            values1 = 4;
                            break;
                        case "较硬岩类":
                            values1 = 2;
                            break;
                        case "坚硬岩类":
                            values1 = 1;
                            break;
                    }
                    LithologyMergeResult result = LithologyMergeResult.builder()
                            .dlmrId(UUIDGenerator.getUUID()).lithologyVersionCode(lithologtCode).lithologyId(jsonObject.getString("id")).taskId(taskId)
                            .geom(jsonObject.getString("geom")).lithologyGroupName(lithologyType).values1(values1).resultStatus(ResultEnum.FINA.getState() + "")
                            .delFlag(YNEnum.N.toString()).createUser(userId).createTime(new Date()).operateType(ResultEnum.FINA.getState() + "").flag("1").build();
                    results.add(result);
                    if (StringUtils.isNotBlank(lithologyType)) {
                        set.add(lithologyType);
                    }
                    jsonObject.put("lithologyGroupName", lithologyType);
                }
            }
            redisTemplate.opsForList().rightPushAll(RedisKeyConstants.SYSTEMPREFIX + ":" + taskId + ":" + lithologtCode + ":lithology", results);
            // Build the comma-separated group list in fixed hardness order.
            // (FIX: the previous lithologyGroup.substring(1) threw
            // StringIndexOutOfBoundsException when no group matched.)
            String[] groupOrder = {"坚硬岩类", "较硬岩类", "较软岩类", "软岩类", "极软岩类"};
            StringBuilder groupNames = new StringBuilder();
            for (String group : groupOrder) {
                if (set.contains(group)) {
                    if (groupNames.length() > 0) {
                        groupNames.append(",");
                    }
                    groupNames.append(group);
                }
            }
            map.put("lithologyGroup", groupNames.toString());

            // Record a new model run; an existing run in result_status=2 is archived below.
            DzzhTaskModel dzzhTaskModel1 = eqLandslideRepository.getDzzhTaskModelByTaskIdAndResultStatus(taskId, "2");
            DzzhTaskModel dzzhTaskModel = new DzzhTaskModel();
            String dtmId = UUIDGenerator.getUUID();
            dzzhTaskModel.setDtmId(dtmId);
            dzzhTaskModel.setTaskId(taskId);
            dzzhTaskModel.setEvaluateStatus("1");
            dzzhTaskModel.setResultStatus("2");
            dzzhTaskModel.setModelType(modelType);
            dzzhTaskModel.setEvaluateUser(userId);
            dzzhTaskModel.setEvaluateTime(PlatformDateUtils.getCurrentTimestamp());
            dzzhTaskModel.setEvaluateUserName(sUser.getUserName());
            dzzhTaskModel.setDelFlag("0");
            dzzhTaskModel.setCreateUser(userId);
            dzzhTaskModel.setCreateTime(PlatformDateUtils.getCurrentTimestamp());
            eqLandslideRepository.saveDzzhTaskModel(dzzhTaskModel);

            // Persist model-specific parameters (5 = Newmark, 1 = AHP) against the new run id.
            if ("5".equals(modelType)) {
                DzzhModelNewmark dzzhModelNewmark = JSON.parseObject(JSON.toJSONString(param), DzzhModelNewmark.class);
                dzzhModelNewmark.setDtmId(dtmId + ",");
                saveDzzhModelNewmark(dzzhModelNewmark);
            } else if ("1".equals(modelType)) {
                DzzhModelCcfx dzzhModelCcfx = JSON.parseObject(JSON.toJSONString(param), DzzhModelCcfx.class);
                dzzhModelCcfx.setDtmId(dtmId + ",");
                saveDzzhModelCcfx(dzzhModelCcfx);
            }
            if (dzzhTaskModel1 != null) {
                delOldAndSaveNew(dzzhTaskModel1, taskId);
            }
            if (results.size() > 0) {
                eqLandslideRepository.mergeGroupBatchSave(results);
            }
        } else {
            // No lithology data for this version/type: nothing to merge.
            map.put("lithologyGroup", "");
        }
        return RestResponse.succeed(map);
    }

    /**
     * AHP model (层次分析): lists computed result rows for a task.
     *
     * @param taskId    task id
     * @param proExceed exceedance-probability filter; presumably null returns
     *                  all rows — confirm against the repository query
     * @return matching result rows
     */
    @Override
    public List<DzzhResultCcfx> getDataByTaskId1(String taskId, String proExceed) {
        return eqLandslideRepository.getResultDataByTaskId1(taskId, proExceed);
    }

    /**
     * Loess Plateau model (黄土高原): lists computed result rows for a task.
     *
     * @param taskId    task id
     * @param proExceed exceedance-probability filter; presumably null returns
     *                  all rows — confirm against the repository query
     * @return matching result rows
     */
    @Override
    public List<DzzhResultLoessPlateau> getDataByTaskId2(String taskId, String proExceed) {
        return eqLandslideRepository.getResultDataByTaskId2(taskId, proExceed);
    }

    /**
     * Exponential model (指数): lists computed result rows for a task.
     *
     * @param taskId      task id
     * @param dzIntensity seismic-intensity filter; presumably null returns
     *                    all rows — confirm against the repository query
     * @return matching result rows
     */
    @Override
    public List<DzzhResultExponential> getDataByTaskId3(String taskId, String dzIntensity) {
        return eqLandslideRepository.getResultDataByTaskId3(taskId, dzIntensity);
    }

    /**
     * Logistic-growth model (逻辑斯蒂): lists computed result rows for a task.
     *
     * @param taskId      task id
     * @param dzIntensity seismic-intensity filter; presumably null returns
     *                    all rows — confirm against the repository query
     * @return matching result rows
     */
    @Override
    public List<DzzhResultLogisticGrowth> getDataByTaskId4(String taskId, String dzIntensity) {
        return eqLandslideRepository.getResultDataByTaskId4(taskId, dzIntensity);
    }

    /**
     * Newmark model: lists computed result rows for a task.
     *
     * @param taskId    task id
     * @param proExceed exceedance-probability filter; presumably null returns
     *                  all rows — confirm against the repository query
     * @return matching result rows
     */
    @Override
    public List<DzzhResultNewmark> getDataByTaskId5(String taskId, String proExceed) {
        return eqLandslideRepository.getResultDataByTaskId5(taskId, proExceed);
    }

    /**
     * Large-screen (大屏展示) lookup: delegates to the repository to fetch data
     * for the given seismic intensity. Semantics of the returned strings are
     * defined by the repository query — not visible here.
     */
    @Override
    public List<String> getDataByDzIntensityOfDpzs(String dzIntensity) {
        return eqLandslideRepository.getDataByDzIntensityOfDpzs(dzIntensity);
    }

    /**
     * Per-province hazardous-area statistics for the logistic-growth model
     * (地震滑坡-逻辑斯蒂模型). Only accepts modelType "4".
     *
     * Pipeline: resolve the task's provinces -> vector-filter the province
     * boundaries into a temp dataset -> for each logistic result raster,
     * reclassify it on demand and tabulate area per division.
     *
     * @param taskId    task id (required)
     * @param modelType must equal "4"
     * @return first failing hypergraph response, or succeed(null)
     */
    @Override
    public RestResponse countResultAreaByProvince(String taskId, String modelType) {
        if (PlatformObjectUtils.isEmpty(taskId) || PlatformObjectUtils.isEmpty(modelType) || !modelType.equals("4")) {
            return RestResponse.fail("按省统计危险性面积失败！");
        }
        modelType = "地震滑坡-逻辑斯蒂模型"; // model display name
        String countType = "0"; // 0 = statistics by province
        String currTime = System.currentTimeMillis() + "";
        String districtLayerName = "district_boundary_province"; // province boundary table
        List<DistrictBoundary> districtBoundaryList = new ArrayList<>(); // matched province rows
        String sqlFilter = "";
        List<String> provinceList = new ArrayList<>();
        // Look up the task's districts ("省-市,省-市,..." CSV) and name.
        String sql01 = "select task_district,task_name from dzzh_task where dt_id=?";
        TaskEntity taskEntity = jdbcTemplate.queryForObject(sql01, new BeanPropertyRowMapper<>(TaskEntity.class), taskId);
        String taskDistrict = taskEntity.getTaskDistrict();
        // Large-screen flag: 1 when the task is a big-screen demo task, else 0.
        String dpFlag = taskEntity.getTaskName().contains("大屏展示") ? "1" : "0";
        for (String district : taskDistrict.split(",")) {
            if (PlatformObjectUtils.isNotEmpty(district)) {
                provinceList.add(district.split("-")[0]); // province part before the first '-'
            }
        }
        if (!provinceList.isEmpty()) {
            // FIX: previous code compared sqlFilter != "" by reference.
            StringBuilder filter = new StringBuilder();
            for (String province : provinceList) {
                if (filter.length() > 0) {
                    filter.append(" or ");
                }
                // Province names come from the task record, not direct user input,
                // but quoting here is still unescaped — kept because the GIS filter
                // expression API takes a raw string.
                filter.append("name='").append(province).append("'");
            }
            sqlFilter = filter.toString();
            // Resolve division codes for the provinces.
            String sql = "select code as divisionCode,name as province from district_boundary_province where " + sqlFilter;
            districtBoundaryList = jdbcTemplate.query(sql, new BeanPropertyRowMapper<>(DistrictBoundary.class));
        }

        // Vector-filter the province boundaries into a temp dataset for tabulation.
        String resultLayerName_0 = "temp_行政区划_" + currTime;
        Map<String, Object> map0 = new HashMap<>();
        map0.put("type", "矢量过滤查询dzzhcount");
        map0.put("layerName", districtLayerName);
        map0.put("resultLayerName", resultLayerName_0);
        map0.put("sqlFilter", sqlFilter);
        RestResponse serverToken0 = hypergraphService.getServerToken(map0);
        if (serverToken0.getCode() != 200) {
            return serverToken0;
        }

        // Walk every logistic result raster, reclassify on demand and tabulate.
        List<DzzhResultLogisticGrowth> list = getDataByTaskId4(taskId, null);
        if (!list.isEmpty()) {
            String modelId = list.get(0).getModelId();
            // Drop previous province statistics for this task/model run.
            deleteDzzhCountAreaResultByTaskId(taskId, countType, modelId);
            for (DzzhResultLogisticGrowth result : list) {
                String layerName = result.getFilePath();
                // Intensity (烈度) is encoded as a "_<n>" suffix, 6..11; skip other rasters.
                String intensity = null;
                for (String level : new String[]{"6", "7", "8", "9", "10", "11"}) {
                    if (layerName.endsWith("_" + level)) {
                        intensity = level;
                        break;
                    }
                }
                if (intensity == null) {
                    continue;
                }
                // Reclassified dataset name derives from the computed one.
                String resultLayerName = layerName.replace("计算", "重分级");
                // Create the reclassified raster only when it does not exist yet.
                RestResponse serverToken1 = hypergraphService.getDatasetInfoByDataset(resultLayerName);
                if (serverToken1.getCode() != 200) {
                    // Bin probabilities into classes 1..5 (nodata stays -9999).
                    String countString = "Con([dzzhTifCount." + layerName + "]==-9999,-9999," +
                            "Con([dzzhTifCount." + layerName + "]<=0.01,1," +
                            "Con([dzzhTifCount." + layerName + "]<=0.03,2," +
                            "Con([dzzhTifCount." + layerName + "]<=0.09,3," +
                            "Con([dzzhTifCount." + layerName + "]<=0.27,4,5)))))";
                    Map<String, Object> map2 = new HashMap<>();
                    map2.put("type", "栅格计算重分级");
                    map2.put("layerName", layerName);
                    map2.put("countString", countString);
                    map2.put("resultLayerName", resultLayerName);
                    RestResponse serverToken2 = hypergraphService.getServerToken(map2);
                    if (serverToken2.getCode() != 200) {
                        return serverToken2;
                    }
                }
                // Tabulate hazard area per province for this intensity.
                countLjsdGridArea(currTime, intensity, resultLayerName, countType, resultLayerName_0, districtBoundaryList, taskId, dpFlag, modelId);
            }
        }

        return RestResponse.succeed(null);
    }
    /**
     * Valid model-type codes accepted by countOtherResultAreaByProvince.
     */
    private List<String> list() {
        String[] codes = {"1", "2", "3", "4", "5"};
        return Arrays.asList(codes);
    }
    /**
     * Maps a model-type code to its human-readable model name.
     */
    private Map<String, String> map() {
        String[][] entries = {
                {"1", "地震滑坡-层次分析法模型"},
                {"2", "地震滑坡-黄土高原模型"},
                {"3", "地震滑坡-指数模型"},
                {"4", "地震滑坡-逻辑斯蒂模型"},
                {"5", "地震滑坡-Newmark模型"},
        };
        Map<String, String> names = new HashMap<>();
        for (String[] entry : entries) {
            names.put(entry[0], entry[1]);
        }
        return names;
    }
    /**
     * Per-province hazardous-area statistics for any supported landslide model.
     * Model-type codes: 1 AHP, 2 Loess Plateau, 3 exponential, 4 logistic,
     * 5 Newmark (see map()).
     *
     * Pipeline mirrors countResultAreaByProvince, but normalizes each model's
     * result rows into PublicEntity and delegates raster work to publicMethod.
     *
     * @param taskId    task id (required)
     * @param modelType numeric model code, one of list()
     * @return first failing hypergraph response, or succeed(null)
     */
    @Override
    public RestResponse countOtherResultAreaByProvince(String taskId, String modelType) {
        String tempField = modelType; // keep the numeric code; modelType becomes the display name below
        if (PlatformObjectUtils.isEmpty(taskId) || !list().contains(modelType)) {
            return RestResponse.fail("按省统计危险性面积失败！");
        }
        modelType = map().get(modelType); // model display name
        String countType = "0"; // 0 = statistics by province
        String currTime = System.currentTimeMillis() + "";
        String districtLayerName = "district_boundary_province"; // province boundary table
        List<DistrictBoundary> districtBoundaryList = new ArrayList<>(); // matched province rows
        String sqlFilter = "";
        List<String> provinceList = new ArrayList<>();
        // Look up the task's districts ("省-市,省-市,..." CSV) and name.
        String sql01 = "select task_district,task_name from dzzh_task where dt_id=?";
        TaskEntity taskEntity = jdbcTemplate.queryForObject(sql01, new BeanPropertyRowMapper<>(TaskEntity.class), taskId);
        String taskDistrict = taskEntity.getTaskDistrict();
        // Large-screen flag: 1 when the task is a big-screen demo task, else 0.
        // (Currently unused downstream here — publicMethod hard-codes dpFlag "0".)
        String dpFlag = taskEntity.getTaskName().contains("大屏展示") ? "1" : "0";
        for (String district : taskDistrict.split(",")) {
            if (PlatformObjectUtils.isNotEmpty(district)) {
                provinceList.add(district.split("-")[0]); // province part before the first '-'
            }
        }
        if (!provinceList.isEmpty()) {
            // FIX: previous code compared sqlFilter != "" by reference.
            StringBuilder filter = new StringBuilder();
            for (String province : provinceList) {
                if (filter.length() > 0) {
                    filter.append(" or ");
                }
                filter.append("name='").append(province).append("'");
            }
            sqlFilter = filter.toString();
            // Resolve division codes for the provinces.
            String sql = "select code as divisionCode,name as province from district_boundary_province where " + sqlFilter;
            districtBoundaryList = jdbcTemplate.query(sql, new BeanPropertyRowMapper<>(DistrictBoundary.class));
        }

        // Vector-filter the province boundaries into a temp dataset for tabulation.
        String resultLayerName_0 = "temp_行政区划_" + currTime;
        Map<String, Object> map0 = new HashMap<>();
        map0.put("type", "矢量过滤查询dzzhcount");
        map0.put("layerName", districtLayerName);
        map0.put("resultLayerName", resultLayerName_0);
        map0.put("sqlFilter", sqlFilter);
        RestResponse serverToken0 = hypergraphService.getServerToken(map0);
        if (serverToken0.getCode() != 200) {
            return serverToken0;
        }

        // Normalize each model's result rows into PublicEntity records.
        // Models 3 and 4 carry no exceedance probability on their rows.
        List<PublicEntity> list = new ArrayList<>();
        if ("1".equals(tempField)) {
            for (DzzhResultCcfx entity : getDataByTaskId1(taskId, null)) {
                PublicEntity publicEntity = new PublicEntity();
                publicEntity.setFilePath(entity.getFilePath()).setModelId(entity.getModelId()).setProExceed(entity.getProExceed());
                list.add(publicEntity);
            }
        } else if ("2".equals(tempField)) {
            for (DzzhResultLoessPlateau entity : getDataByTaskId2(taskId, null)) {
                PublicEntity publicEntity = new PublicEntity();
                publicEntity.setFilePath(entity.getFilePath()).setModelId(entity.getModelId()).setProExceed(entity.getProExceed());
                list.add(publicEntity);
            }
        } else if ("3".equals(tempField)) {
            for (DzzhResultExponential entity : getDataByTaskId3(taskId, null)) {
                PublicEntity publicEntity = new PublicEntity();
                publicEntity.setFilePath(entity.getFilePath()).setModelId(entity.getModelId());
                list.add(publicEntity);
            }
        } else if ("4".equals(tempField)) {
            for (DzzhResultLogisticGrowth entity : getDataByTaskId4(taskId, null)) {
                PublicEntity publicEntity = new PublicEntity();
                publicEntity.setFilePath(entity.getFilePath()).setModelId(entity.getModelId());
                list.add(publicEntity);
            }
        } else if ("5".equals(tempField)) {
            for (DzzhResultNewmark entity : getDataByTaskId5(taskId, null)) {
                PublicEntity publicEntity = new PublicEntity();
                publicEntity.setFilePath(entity.getFilePath()).setModelId(entity.getModelId()).setProExceed(entity.getProExceed());
                list.add(publicEntity);
            }
        }
        if (!list.isEmpty()) {
            String modelId = list.get(0).getModelId();
            // Drop previous province statistics for this task/model run.
            deleteDzzhCountAreaResultOtherByTaskId(taskId, countType, modelId, modelType);
            for (PublicEntity entity : list) {
                RestResponse restResponse = publicMethod(entity, currTime, resultLayerName_0, districtBoundaryList, taskId, modelId, modelType, "0");
                if (restResponse != null) {
                    return restResponse; // propagate the first raster failure
                }
            }
        }
        return RestResponse.succeed(null);
    }

    /**
     * Deletes per-division hazard-area statistics for a task + model run.
     *
     * @param taskId    task id
     * @param type      statistic scope: 0 = province, 1 = city; skipped when empty
     * @param modelId   model run id
     * @param modelType human-readable model name; skipped when empty
     */
    private void deleteDzzhCountAreaResultOtherByTaskId(String taskId, String type, String modelId, String modelType) {
        // FIX: parameterized to avoid SQL injection — values originate from
        // request input (the old code concatenated them into the statement).
        StringBuilder sql = new StringBuilder(
                "delete from dzzh_count_area_result_other where task_id = ? and model_id = ?");
        List<Object> args = new ArrayList<>();
        args.add(taskId);
        args.add(modelId);
        if (PlatformObjectUtils.isNotEmpty(type)) {
            sql.append(" and type = ?");
            args.add(type);
        }
        if (PlatformObjectUtils.isNotEmpty(modelType)) {
            sql.append(" and model_type = ?");
            args.add(modelType);
        }
        jdbcTemplate.update(sql.toString(), args.toArray());
    }

    /**
     * Reclassifies one model result raster (creating the "重分级" dataset when
     * it does not exist yet) and then tabulates per-division hazard areas.
     * Returns the failing hypergraph response when reclassification fails,
     * otherwise null.
     *
     * NOTE(review): dpFlag is hard-coded to "0" in the final call — confirm
     * large-screen tasks never reach this path. For the exponential model,
     * filePath is assumed to have at least four "_"-separated tokens — TODO
     * confirm with the producer of those layer names.
     */
    public RestResponse publicMethod(PublicEntity publicEntity, String currTime, String resultLayerName_0, List<DistrictBoundary> districtBoundaryList, String taskId, String modelId, String modelType, String countType) {
        String layerName = publicEntity.getFilePath();
        String intensity;
        String resultLayerName;
        boolean exponential = "地震滑坡-指数模型".equals(modelType);
        boolean ccfx = "地震滑坡-层次分析法模型".equals(modelType);
        boolean loess = "地震滑坡-黄土高原模型".equals(modelType);
        boolean newmark = "地震滑坡-Newmark模型".equals(modelType);
        if (ccfx || loess || newmark || exponential) {
            if (exponential) {
                // Intensity is the fourth "_"-separated token of the layer name.
                intensity = publicEntity.getFilePath().split("_")[3];
                resultLayerName = layerName.replace("计算", "重分级");
            } else {
                intensity = publicEntity.getProExceed();
                resultLayerName = ccfx
                        ? layerName.replace("重分类", "重分级")
                        : layerName.replace("最终计算", "重分级");
            }
        } else {
            // Logistic model: intensity encoded as a "_<n>" suffix (6..11); empty when absent.
            intensity = "";
            for (String level : new String[]{"6", "7", "8", "9", "10", "11"}) {
                if (layerName.endsWith("_" + level)) {
                    intensity = level;
                    break;
                }
            }
            resultLayerName = layerName.replace("计算", "重分级");
        }

        // Create the reclassified raster only when the dataset does not exist yet.
        RestResponse datasetInfo = hypergraphService.getDatasetInfoByDataset(resultLayerName);
        if (datasetInfo.getCode() != 200) {
            // Bin probabilities into classes 1..5 (nodata stays -9999).
            String countString = "Con([dzzhTifCount." + layerName + "]==-9999,-9999,"
                    + "Con([dzzhTifCount." + layerName + "]<=0.01,1,"
                    + "Con([dzzhTifCount." + layerName + "]<=0.03,2,"
                    + "Con([dzzhTifCount." + layerName + "]<=0.09,3,"
                    + "Con([dzzhTifCount." + layerName + "]<=0.27,4,5)))))";
            Map<String, Object> reclassParams = new HashMap<>();
            reclassParams.put("type", "栅格计算重分级");
            reclassParams.put("layerName", layerName);
            reclassParams.put("countString", countString);
            reclassParams.put("resultLayerName", resultLayerName);
            RestResponse reclassResult = hypergraphService.getServerToken(reclassParams);
            if (reclassResult.getCode() != 200) {
                return reclassResult;
            }
        }
        // Tabulate hazard area per division for this raster.
        countOtherGridArea(currTime, intensity, resultLayerName, countType, resultLayerName_0, districtBoundaryList, taskId, "0", modelId, modelType);
        return null;
    }

    /**
     * Counts hazard area per district for the non-logistic earthquake-landslide models.
     * Tabulates the graded raster per district via the hypergraph service, reads the
     * per-district area columns back from the temporary table, and persists one row
     * per (grade value, district) into dzzh_count_area_result_other.
     *
     * Fix: the temporary tabulation table is now dropped in a finally block, so it no
     * longer leaks when a query or the save step throws.
     */
    public RestResponse countOtherGridArea(String currTime, String intensity, String layerName, String countType, String districtLayerName,
                                          List<DistrictBoundary> districtBoundaryList, String taskId, String dpFlag, String modelId, String modelType) {
        // Zonal tabulation of graded raster area per district (area unit: 10,000 km^2)
        Map<String, Object> map1 = new HashMap<>();
        map1.put("type", "区域制表统计栅格分级面积");
        map1.put("layerName", layerName);
        map1.put("districtLayerName", districtLayerName);
        String resultLayerName1 = "temp_ljsd_table_" + currTime + "_" + intensity;// temporary tabulation result dataset
        map1.put("resultLayerName", resultLayerName1);
        RestResponse serverToken1 = hypergraphService.getServerToken(map1);
        if (serverToken1.getCode() != 200) {
            return serverToken1;
        }

        List<DzzhCountAreaResult> dzzhCountAreaResultList = new ArrayList<>();
        try {
            // column names of the temporary tabulation table
            String sql1 = "SELECT column_name FROM information_schema.columns WHERE table_name = '" + resultLayerName1 + "';";
            List<String> columnList = jdbcTemplate.queryForList(sql1, String.class);
            // grade values present in the tabulation result
            String sql2 = "select value from " + resultLayerName1 + " order by value";
            List<Integer> valueList = jdbcTemplate.queryForList(sql2, Integer.class);
            if (valueList != null && !valueList.isEmpty()) {
                for (Integer value : valueList) {
                    // one area column per district, named "value_<divisionCode>"
                    for (DistrictBoundary districtBoundary : districtBoundaryList) {
                        String code = districtBoundary.getDivisionCode();
                        String valueCode = "value_" + code;
                        if (!columnList.contains(valueCode)) {
                            continue;// no tabulated area column for this district
                        }
                        // area (km^2) and percentage (%); s_area.area is stored in square meters
                        String sql3 = "select b.code,b.name,a.value as gridvalue,a." + valueCode + "*10000 as area,CAST(b.area AS DECIMAL) / 1000000 as total_area,a." + valueCode + "*10000/(CAST(b.area AS DECIMAL) / 1000000) * 100 as percent " +
                                "from " + resultLayerName1 + " a,s_area b " +
                                "where a.value = " + value + " and b.code='" + code + "'";
                        List<DzzhCountAreaResult> rows = jdbcTemplate.query(sql3, new BeanPropertyRowMapper<>(DzzhCountAreaResult.class));
                        for (DzzhCountAreaResult dzzhCountAreaResult : rows) {
                            dzzhCountAreaResult.setId(UUIDGenerator.getUUID());
                            dzzhCountAreaResult.setTaskId(taskId);
                            dzzhCountAreaResult.setModelType(modelType);// model type
                            dzzhCountAreaResult.setDpFlag(dpFlag);// large-screen task flag: 1 = yes, 0 = no
                            dzzhCountAreaResult.setDzIntensity(intensity);// seismic intensity 6-11
                            dzzhCountAreaResult.setLayerName(layerName);// layer name
                            dzzhCountAreaResult.setType(countType);// statistics scope: 0 = province, 1 = city
                            dzzhCountAreaResult.setModelId(modelId);// model ID
                            dzzhCountAreaResultList.add(dzzhCountAreaResult);
                        }
                    }
                }
            }
            // persist hazard-area statistics
            saveDzzhCountAreaOtherResultList(dzzhCountAreaResultList);
        } finally {
            // always drop the temporary tabulation table, even on failure
            jdbcTemplate.update("drop table " + resultLayerName1);
        }

        return RestResponse.succeed(null);
    }
    /**
     * Counts hazard area per district for the logistic-growth model
     * (【地震滑坡-逻辑斯蒂模型】). Tabulates the graded raster per district, reads
     * the per-district area columns back from the temporary table, persists one
     * row per (grade value, district), then drops the temporary table.
     */
    public RestResponse countLjsdGridArea(String currTime, String intensity, String layerName, String countType, String districtLayerName,
                                          List<DistrictBoundary> districtBoundaryList, String taskId, String dpFlag, String modelId) {
        // Zonal tabulation of graded raster area per district (area unit: 10,000 km^2)
        String tempTable = "temp_ljsd_table_" + currTime + "_" + intensity;// tabulation result dataset
        Map<String, Object> tabulateParams = new HashMap<>();
        tabulateParams.put("type", "区域制表统计栅格分级面积");
        tabulateParams.put("layerName", layerName);
        tabulateParams.put("districtLayerName", districtLayerName);
        tabulateParams.put("resultLayerName", tempTable);
        RestResponse tabulateResp = hypergraphService.getServerToken(tabulateParams);
        if (tabulateResp.getCode() != 200) {
            return tabulateResp;
        }

        // column names of the temporary tabulation table
        List<String> columnList = jdbcTemplate.queryForList(
                "SELECT column_name FROM information_schema.columns WHERE table_name = '" + tempTable + "';", String.class);
        // grade values present in the tabulation result
        List<Integer> valueList = jdbcTemplate.queryForList(
                "select value from " + tempTable + " order by value", Integer.class);

        List<DzzhCountAreaResult> resultRows = new ArrayList<>();
        if (valueList != null && !valueList.isEmpty()) {
            for (Integer gradeValue : valueList) {
                // one area column per district, named "value_<divisionCode>"
                for (DistrictBoundary district : districtBoundaryList) {
                    String divisionCode = district.getDivisionCode();
                    String areaColumn = "value_" + divisionCode;
                    if (!columnList.contains(areaColumn)) {
                        continue;// no tabulated area column for this district
                    }
                    // area (km^2) and percentage (%); s_area.area is stored in square meters
                    String areaSql = "select b.code,b.name,a.value as gridvalue,a." + areaColumn + "*10000 as area,CAST(b.area AS DECIMAL) / 1000000 as total_area,a." + areaColumn + "*10000/(CAST(b.area AS DECIMAL) / 1000000) * 100 as percent " +
                            "from " + tempTable + " a,s_area b " +
                            "where a.value = " + gradeValue + " and b.code='" + divisionCode + "'";
                    for (DzzhCountAreaResult row : jdbcTemplate.query(areaSql, new BeanPropertyRowMapper<>(DzzhCountAreaResult.class))) {
                        row.setId(UUIDGenerator.getUUID());
                        row.setTaskId(taskId);
                        row.setModelType("地震滑坡-逻辑斯蒂模型");// model type
                        row.setDpFlag(dpFlag);// large-screen task flag: 1 = yes, 0 = no
                        row.setDzIntensity(intensity);// seismic intensity 6-11
                        row.setLayerName(layerName);// layer name
                        row.setType(countType);// statistics scope: 0 = province, 1 = city
                        row.setModelId(modelId);// model ID
                        resultRows.add(row);
                    }
                }
            }
        }
        // persist hazard-area statistics
        saveDzzhCountAreaResultList(resultRows);
        // drop the temporary tabulation table
        jdbcTemplate.update("drop table " + tempTable);

        return RestResponse.succeed(null);
    }

    /**
     * Counts hazard area by city for the logistic-growth model (modelType "4")
     * analysis results: builds a city-code filter from the task's district string,
     * creates the district dataset, reclassifies each intensity result raster if
     * needed, and delegates the per-city area statistics to countLjsdGridArea.
     *
     * Fixes: String content checks instead of reference comparison (!= ""); the
     * " or " separator is appended only after a city code is actually resolved
     * (previously an unresolved city left a dangling " or " that broke the SQL);
     * an empty filter now fails fast instead of issuing malformed SQL.
     */
    @Override
    public RestResponse countResultAreaByCity(String taskId, String modelType) {
        if (PlatformObjectUtils.isEmpty(taskId) || PlatformObjectUtils.isEmpty(modelType) || !modelType.equals("4")) {
            return RestResponse.fail("按市统计危险性面积失败！");
        }
        modelType = "地震滑坡-逻辑斯蒂模型";// model type
        String countType = "1";// statistics by city
        String currTime = System.currentTimeMillis() + "";
        String districtLayerName = "district_boundary_city";// city boundary table
        List<DistrictBoundary> districtBoundaryList;// city boundary list
        String sqlFilter = "";
        // task district scope and name
        String sql01 = "select task_district,task_name from dzzh_task where dt_id=?";
        TaskEntity taskEntity = jdbcTemplate.queryForObject(sql01, new BeanPropertyRowMapper<>(TaskEntity.class), taskId);
        String taskDistrict = taskEntity.getTaskDistrict();
        // large-screen task flag: 1 = yes, 0 = no
        String dpFlag = taskEntity.getTaskName().contains("大屏展示") ? "1" : "0";
        String[] tempArr = taskDistrict.split(",");
        List<String> provinceList = new ArrayList<>();
        for (String district : tempArr) {
            String[] tempStrArr = district.split("-");
            if (tempStrArr.length > 1) {
                // "province-city" entry: resolve the city's division code
                String province = tempStrArr[0];
                String city = tempStrArr[1];
                String sql0 = "select b.code from district_boundary_province a,district_boundary_city b " +
                        "where a.name='" + province + "' and b.name='" + city + "' and subString(a.code,1,2) = subString(b.code,1,2)";
                List<String> codeList = jdbcTemplate.queryForList(sql0, String.class);
                if (codeList != null && codeList.size() > 0) {
                    // append separator only once a code was actually found
                    if (!sqlFilter.isEmpty()) {
                        sqlFilter += " or ";
                    }
                    sqlFilter += "code='" + codeList.get(0) + "'";
                }
            } else {
                // whole-province entry
                String province = tempStrArr[0];
                if (PlatformObjectUtils.isNotEmpty(province)) {
                    provinceList.add(province);
                }
            }
        }
        if (provinceList.size() > 0) {// whole provinces: include every city of each province
            String provinceStr = "";
            for (int i = 0; i < provinceList.size(); i++) {
                if (i > 0) {
                    provinceStr += ",";
                }
                provinceStr += "'" + provinceList.get(i) + "'";
            }
            // first 2 digits of the province division codes
            String sql = "select subString(code,1,2) as code from district_boundary_province where name in (" + provinceStr + ")";
            List<String> codeList = jdbcTemplate.queryForList(sql, String.class);
            if (codeList != null && codeList.size() > 0) {
                for (String code : codeList) {
                    if (!sqlFilter.isEmpty()) {// content check, not String reference comparison
                        sqlFilter += " or ";
                    }
                    sqlFilter += "code like '" + code + "%'";
                }
            }
        }
        if (sqlFilter.isEmpty()) {
            // no resolvable district: "where " + "" below would be malformed SQL
            return RestResponse.fail("按市统计危险性面积失败！");
        }
        // city boundary rows in scope
        String sql1 = "select code as divisionCode,name as city from district_boundary_city where " + sqlFilter;
        districtBoundaryList = jdbcTemplate.query(sql1, new BeanPropertyRowMapper<>(DistrictBoundary.class));

        // vector filter query to build the district dataset
        String resultLayerName_0 = "temp_行政区划_" + currTime;// result dataset name
        Map<String, Object> map0 = new HashMap<>();
        map0.put("type", "矢量过滤查询dzzhcount");
        map0.put("layerName", districtLayerName);// vector dataset name
        map0.put("resultLayerName", resultLayerName_0);// result dataset name
        map0.put("sqlFilter", sqlFilter);// SQL filter expression
        RestResponse serverToken0 = hypergraphService.getServerToken(map0);
        if (serverToken0.getCode() != 200) {
            return serverToken0;
        }

        // computed result layers for this task
        List<DzzhResultLogisticGrowth> list = getDataByTaskId4(taskId, null);
        if (list.size() > 0) {
            String modelId = list.get(0).getModelId();// model ID
            // remove previous per-city statistics for this task
            deleteDzzhCountAreaResultByTaskId(taskId, countType, modelId);
            for (DzzhResultLogisticGrowth result : list) {
                String layerName = result.getFilePath();
                // seismic intensity 6-11 taken from the layer-name suffix; skip other layers
                String intensity = null;
                for (String level : new String[]{"6", "7", "8", "9", "10", "11"}) {
                    if (layerName.endsWith("_" + level)) {
                        intensity = level;
                        break;
                    }
                }
                if (intensity == null) {
                    continue;
                }
                // reclassified dataset name
                String resultLayerName = layerName.replace("计算", "重分级");
                // reclassify only when the dataset does not exist yet
                RestResponse serverToken1 = hypergraphService.getDatasetInfoByDataset(resultLayerName);
                if (serverToken1.getCode() != 200) {
                    // raster reclassification: probability -> grade 1..5 (nodata stays -9999)
                    String countString = "Con([dzzhTifCount." + layerName + "]==-9999,-9999," +
                            "Con([dzzhTifCount." + layerName + "]<=0.01,1," +
                            "Con([dzzhTifCount." + layerName + "]<=0.03,2," +
                            "Con([dzzhTifCount." + layerName + "]<=0.09,3," +
                            "Con([dzzhTifCount." + layerName + "]<=0.27,4,5)))))";
                    Map<String, Object> map2 = new HashMap<>();
                    map2.put("type", "栅格计算重分级");
                    map2.put("layerName", layerName);// raster dataset name
                    map2.put("countString", countString);// raster calculation expression
                    map2.put("resultLayerName", resultLayerName);// result dataset name
                    RestResponse serverToken2 = hypergraphService.getServerToken(map2);
                    if (serverToken2.getCode() != 200) {
                        return serverToken2;
                    }
                }
                // clip raster by district, vectorize and compute areas
                countLjsdGridArea(currTime, intensity, resultLayerName, countType, resultLayerName_0, districtBoundaryList, taskId, dpFlag, modelId);
            }
        }

        return RestResponse.succeed(null);
    }

    /**
     * Counts hazard area by city for the other earthquake-landslide models
     * (modelType "1".."5", mapped to a display name via map()): builds a
     * city-code filter from the task's district string, creates the district
     * dataset, normalizes each model's result rows into PublicEntity, and
     * delegates the per-layer statistics to publicMethod.
     *
     * Fixes: String content checks instead of reference comparison (!= ""); the
     * " or " separator is appended only after a city code is actually resolved
     * (previously an unresolved city left a dangling " or " that broke the SQL);
     * an empty filter now fails fast instead of issuing malformed SQL.
     */
    @Override
    public RestResponse countOtherResultAreaByCity(String taskId, String modelType) {
        if (PlatformObjectUtils.isEmpty(taskId) || !list().contains(modelType)) {
            return RestResponse.fail("按市统计危险性面积失败！");
        }
        String tempField = modelType;
        modelType = map().get(modelType);// model type display name
        String countType = "1";// statistics by city
        String currTime = System.currentTimeMillis() + "";
        String districtLayerName = "district_boundary_city";// city boundary table
        List<DistrictBoundary> districtBoundaryList;// city boundary list
        String sqlFilter = "";
        // task district scope and name
        String sql01 = "select task_district,task_name from dzzh_task where dt_id=?";
        TaskEntity taskEntity = jdbcTemplate.queryForObject(sql01, new BeanPropertyRowMapper<>(TaskEntity.class), taskId);
        String taskDistrict = taskEntity.getTaskDistrict();
        // large-screen task flag: 1 = yes, 0 = no
        String dpFlag = taskEntity.getTaskName().contains("大屏展示") ? "1" : "0";
        String[] tempArr = taskDistrict.split(",");
        List<String> provinceList = new ArrayList<>();
        for (String district : tempArr) {
            String[] tempStrArr = district.split("-");
            if (tempStrArr.length > 1) {
                // "province-city" entry: resolve the city's division code
                String province = tempStrArr[0];
                String city = tempStrArr[1];
                String sql0 = "select b.code from district_boundary_province a,district_boundary_city b " +
                        "where a.name='" + province + "' and b.name='" + city + "' and subString(a.code,1,2) = subString(b.code,1,2)";
                List<String> codeList = jdbcTemplate.queryForList(sql0, String.class);
                if (codeList != null && codeList.size() > 0) {
                    // append separator only once a code was actually found
                    if (!sqlFilter.isEmpty()) {
                        sqlFilter += " or ";
                    }
                    sqlFilter += "code='" + codeList.get(0) + "'";
                }
            } else {
                // whole-province entry
                String province = tempStrArr[0];
                if (PlatformObjectUtils.isNotEmpty(province)) {
                    provinceList.add(province);
                }
            }
        }
        if (provinceList.size() > 0) {// whole provinces: include every city of each province
            String provinceStr = "";
            for (int i = 0; i < provinceList.size(); i++) {
                if (i > 0) {
                    provinceStr += ",";
                }
                provinceStr += "'" + provinceList.get(i) + "'";
            }
            // first 2 digits of the province division codes
            String sql = "select subString(code,1,2) as code from district_boundary_province where name in (" + provinceStr + ")";
            List<String> codeList = jdbcTemplate.queryForList(sql, String.class);
            if (codeList != null && codeList.size() > 0) {
                for (String code : codeList) {
                    if (!sqlFilter.isEmpty()) {// content check, not String reference comparison
                        sqlFilter += " or ";
                    }
                    sqlFilter += "code like '" + code + "%'";
                }
            }
        }
        if (sqlFilter.isEmpty()) {
            // no resolvable district: "where " + "" below would be malformed SQL
            return RestResponse.fail("按市统计危险性面积失败！");
        }
        // city boundary rows in scope
        String sql1 = "select code as divisionCode,name as city from district_boundary_city where " + sqlFilter;
        districtBoundaryList = jdbcTemplate.query(sql1, new BeanPropertyRowMapper<>(DistrictBoundary.class));
        // vector filter query to build the district dataset
        String resultLayerName_0 = "temp_行政区划_" + currTime;// result dataset name
        Map<String, Object> map0 = new HashMap<>();
        map0.put("type", "矢量过滤查询dzzhcount");
        map0.put("layerName", districtLayerName);// vector dataset name
        map0.put("resultLayerName", resultLayerName_0);// result dataset name
        map0.put("sqlFilter", sqlFilter);// SQL filter expression
        RestResponse serverToken0 = hypergraphService.getServerToken(map0);
        if (serverToken0.getCode() != 200) {
            return serverToken0;
        }
        // normalize the per-model result rows into PublicEntity
        // (models "3" and "4" carry no proExceed on their result rows)
        List<PublicEntity> list = new ArrayList<>();
        if ("1".equals(tempField)) {
            getDataByTaskId1(taskId, null).forEach(entity -> {
                PublicEntity publicEntity = new PublicEntity();
                publicEntity.setFilePath(entity.getFilePath()).setModelId(entity.getModelId()).setProExceed(entity.getProExceed());
                list.add(publicEntity);
            });
        } else if ("2".equals(tempField)) {
            getDataByTaskId2(taskId, null).forEach(entity -> {
                PublicEntity publicEntity = new PublicEntity();
                publicEntity.setFilePath(entity.getFilePath()).setModelId(entity.getModelId()).setProExceed(entity.getProExceed());
                list.add(publicEntity);
            });
        } else if ("3".equals(tempField)) {
            getDataByTaskId3(taskId, null).forEach(entity -> {
                PublicEntity publicEntity = new PublicEntity();
                publicEntity.setFilePath(entity.getFilePath()).setModelId(entity.getModelId());
                list.add(publicEntity);
            });
        } else if ("4".equals(tempField)) {
            getDataByTaskId4(taskId, null).forEach(entity -> {
                PublicEntity publicEntity = new PublicEntity();
                publicEntity.setFilePath(entity.getFilePath()).setModelId(entity.getModelId());
                list.add(publicEntity);
            });
        } else if ("5".equals(tempField)) {
            getDataByTaskId5(taskId, null).forEach(entity -> {
                PublicEntity publicEntity = new PublicEntity();
                publicEntity.setFilePath(entity.getFilePath()).setModelId(entity.getModelId()).setProExceed(entity.getProExceed());
                list.add(publicEntity);
            });
        }
        if (list.size() > 0) {
            String modelId = list.get(0).getModelId();// model ID
            // remove previous per-city statistics for this task/model
            deleteDzzhCountAreaResultOtherByTaskId(taskId, countType, modelId, modelType);
            for (PublicEntity entity : list) {
                RestResponse restResponse = publicMethod(entity, currTime, resultLayerName_0, districtBoundaryList, taskId, modelId, modelType, "1");
                if (restResponse != null) {
                    return restResponse;
                }
            }
        }
        return RestResponse.succeed(null);
    }

    /**
     * Counts hazard area by county for the logistic-growth model results: builds
     * a county-code filter from the task's district string (county entries,
     * city entries expanded to their counties, province entries expanded to all
     * their counties), creates the district dataset, reclassifies each intensity
     * result raster if needed, and delegates statistics to countLjsdGridArea.
     *
     * Fixes: the city-expansion query previously emitted
     * "where 1=1 ... or  and  (name=...)" from the second city on — invalid SQL;
     * clauses are now joined as "(c1) or (c2) ...". Also: String content checks
     * instead of reference comparison (!= ""); the " or " separator is appended
     * only after a county code is actually resolved; an empty filter fails fast
     * instead of issuing malformed SQL.
     */
    @Override
    public RestResponse countResultAreaByCounty(String taskId, String modelType) {
        if (PlatformObjectUtils.isEmpty(taskId) || !list().contains(modelType)) {
            return RestResponse.fail("按区县统计危险性面积失败！");
        }
        String countType = "2";// statistics by county
        String currTime = System.currentTimeMillis() + "";
        String districtLayerName = "district_boundary_county";// county boundary table
        List<DistrictBoundary> districtBoundaryList;// county boundary list
        String sqlFilter = "";
        // task district scope and name
        String sql01 = "select task_district,task_name from dzzh_task where dt_id=?";
        TaskEntity taskEntity = jdbcTemplate.queryForObject(sql01, new BeanPropertyRowMapper<>(TaskEntity.class), taskId);
        String taskDistrict = taskEntity.getTaskDistrict();
        // large-screen task flag: 1 = yes, 0 = no
        String dpFlag = taskEntity.getTaskName().contains("大屏展示") ? "1" : "0";
        String[] tempArr = taskDistrict.split(",");
        List<String> provinceList = new ArrayList<>();
        List<String> cityList = new ArrayList<>();
        for (String district : tempArr) {
            String[] tempStrArr = district.split("-");
            if (tempStrArr.length > 2) {
                // "province-city-county", e.g. 四川省-成都市-邛崃市: resolve the county code
                String province = tempStrArr[0];
                String city = tempStrArr[1];
                String county = tempStrArr[2];
                String sql0 = "select c.code from district_boundary_province a,district_boundary_city b,district_boundary_county c " +
                        "where a.name='" + province + "' and b.name='" + city + "' and c.name='" + county + "' and subString(a.code,1,2) = subString(b.code,1,2) and subString(b.code,1,4) = subString(c.code,1,4)";
                List<String> codeList = jdbcTemplate.queryForList(sql0, String.class);
                if (PlatformObjectUtils.isNotEmpty(codeList) && codeList.size() > 0) {
                    // append separator only once a code was actually found
                    if (!sqlFilter.isEmpty()) {
                        sqlFilter += " or ";
                    }
                    sqlFilter += "code='" + codeList.get(0) + "'";
                }
            } else if (tempStrArr.length > 1) {
                // "province-city", e.g. 辽宁省-大连市: expand to its counties below
                cityList.add(tempStrArr[0] + "_" + tempStrArr[1]);
            } else {
                // whole-province entry, e.g. 辽宁省
                if (PlatformObjectUtils.isNotEmpty(tempStrArr[0])) {
                    provinceList.add(tempStrArr[0]);
                }
            }
        }
        if (cityList.size() > 0) {
            // Resolve the 4-digit code prefix of every listed city; clauses are
            // combined with " or " inside a single where condition.
            StringBuilder cityClauses = new StringBuilder();
            for (String area : cityList) {
                String[] parts = area.split("_");
                // first 2 digits of the province's division code, if resolvable
                String subSql = "select subString(code,1,2) as code from district_boundary_province where name = '" + parts[0] + "'";
                List<String> provCodes = jdbcTemplate.queryForList(subSql, String.class);
                String provinceCode = PlatformObjectUtils.isNotEmpty(provCodes) && provCodes.size() > 0 ? provCodes.get(0) : "";
                if (cityClauses.length() > 0) {
                    cityClauses.append(" or ");
                }
                if (StringUtils.isNotBlank(provinceCode)) {
                    cityClauses.append("(name='").append(parts[1]).append("' and code like '").append(provinceCode).append("%')");
                } else {
                    cityClauses.append("(name='").append(parts[1]).append("')");
                }
            }
            String sql = "select subString(code,1,4) as code from district_boundary_city where " + cityClauses;
            // 4-digit city code prefixes
            List<String> codeList = jdbcTemplate.queryForList(sql, String.class);
            if (PlatformObjectUtils.isNotEmpty(codeList) && codeList.size() > 0) {
                for (String code : codeList) {
                    if (!sqlFilter.isEmpty()) {// content check, not String reference comparison
                        sqlFilter += " or ";
                    }
                    sqlFilter += "code like '" + code + "%'";
                }
            }
        }
        if (provinceList.size() > 0) {// whole provinces: include every county of each province
            String provinceStr = "";
            for (int i = 0; i < provinceList.size(); i++) {
                if (i > 0) {
                    provinceStr += ",";
                }
                provinceStr += "'" + provinceList.get(i) + "'";
            }
            // first 2 digits of the province division codes
            String sql = "select subString(code,1,2) as code from district_boundary_province where name in (" + provinceStr + ")";
            List<String> codeList = jdbcTemplate.queryForList(sql, String.class);
            if (PlatformObjectUtils.isNotEmpty(codeList) && codeList.size() > 0) {
                for (String code : codeList) {
                    if (!sqlFilter.isEmpty()) {
                        sqlFilter += " or ";
                    }
                    sqlFilter += "code like '" + code + "%'";
                }
            }
        }
        if (sqlFilter.isEmpty()) {
            // no resolvable district: "where " + "" below would be malformed SQL
            return RestResponse.fail("按区县统计危险性面积失败！");
        }
        // county boundary rows in scope
        String sql1 = "select code as divisionCode,name as city from district_boundary_county where " + sqlFilter;
        districtBoundaryList = jdbcTemplate.query(sql1, new BeanPropertyRowMapper<>(DistrictBoundary.class));
        // vector filter query to build the district dataset
        String resultLayerName_0 = "temp_行政区划_" + currTime;// result dataset name
        Map<String, Object> map0 = new HashMap<>();
        map0.put("type", "矢量过滤查询dzzhcount");
        map0.put("layerName", districtLayerName);// vector dataset name
        map0.put("resultLayerName", resultLayerName_0);// result dataset name
        map0.put("sqlFilter", sqlFilter);// SQL filter expression
        RestResponse serverToken0 = hypergraphService.getServerToken(map0);
        if (serverToken0.getCode() != 200) {
            return serverToken0;
        }

        // computed result layers for this task
        List<DzzhResultLogisticGrowth> list = getDataByTaskId4(taskId, null);
        if (list.size() > 0) {
            String modelId = list.get(0).getModelId();// model ID
            // remove previous per-county statistics for this task
            deleteDzzhCountAreaResultByTaskId(taskId, countType, modelId);
            for (DzzhResultLogisticGrowth result : list) {
                String layerName = result.getFilePath();
                // seismic intensity 6-11 taken from the layer-name suffix; skip other layers
                String intensity = null;
                for (String level : new String[]{"6", "7", "8", "9", "10", "11"}) {
                    if (layerName.endsWith("_" + level)) {
                        intensity = level;
                        break;
                    }
                }
                if (intensity == null) {
                    continue;
                }
                // reclassified dataset name
                String resultLayerName = layerName.replace("计算", "重分级");
                // reclassify only when the dataset does not exist yet
                RestResponse serverToken1 = hypergraphService.getDatasetInfoByDataset(resultLayerName);
                if (serverToken1.getCode() != 200) {
                    // raster reclassification: probability -> grade 1..5 (nodata stays -9999)
                    String countString = "Con([dzzhTifCount." + layerName + "]==-9999,-9999," +
                            "Con([dzzhTifCount." + layerName + "]<=0.01,1," +
                            "Con([dzzhTifCount." + layerName + "]<=0.03,2," +
                            "Con([dzzhTifCount." + layerName + "]<=0.09,3," +
                            "Con([dzzhTifCount." + layerName + "]<=0.27,4,5)))))";
                    Map<String, Object> map2 = new HashMap<>();
                    map2.put("type", "栅格计算重分级");
                    map2.put("layerName", layerName);// raster dataset name
                    map2.put("countString", countString);// raster calculation expression
                    map2.put("resultLayerName", resultLayerName);// result dataset name
                    RestResponse serverToken2 = hypergraphService.getServerToken(map2);
                    if (serverToken2.getCode() != 200) {
                        return serverToken2;
                    }
                }
                // clip raster by district, vectorize and compute areas
                countLjsdGridArea(currTime, intensity, resultLayerName, countType, resultLayerName_0, districtBoundaryList, taskId, dpFlag, modelId);
            }
        }

        return RestResponse.succeed(null);
    }

    // Batch-insert hazard-area statistics rows; the named parameters must match
    // the bean properties of DzzhCountAreaResult in column order.
    @Override
    public void saveDzzhCountAreaResultList(List<DzzhCountAreaResult> dzzhCountAreaResultList) {
        final String insertSql = "insert into dzzh_count_area_result  values(" +
                ":id,:name,:gridvalue,:area,:totalArea,:percent,:type,:layerName,:dzIntensity,:taskId,:dpFlag,:modelType,:code,:modelId,:extend1)";
        SqlParameterSource[] batchParams = SqlParameterSourceUtils.createBatch(dzzhCountAreaResultList.toArray());
        namedParameterJdbcTemplate.batchUpdate(insertSql, batchParams);
    }

    // Batch-insert hazard-area statistics into the "_other" table (non-primary model types);
    // same column layout as dzzh_count_area_result.
    private void saveDzzhCountAreaOtherResultList(List<DzzhCountAreaResult> dzzhCountAreaResultList) {
        final String insertSql = "insert into dzzh_count_area_result_other  values(" +
                ":id,:name,:gridvalue,:area,:totalArea,:percent,:type,:layerName,:dzIntensity,:taskId,:dpFlag,:modelType,:code,:modelId,:extend1)";
        SqlParameterSource[] batchParams = SqlParameterSourceUtils.createBatch(dzzhCountAreaResultList.toArray());
        namedParameterJdbcTemplate.batchUpdate(insertSql, batchParams);
    }

    /**
     * Deletes hazard-area statistics for a task/model, optionally narrowed to one stat type.
     * Fix: the original concatenated caller-supplied values straight into the SQL string
     * (SQL-injection risk); this version uses bind variables.
     *
     * @param taskId  task id (required)
     * @param type    stat type: "0" = province, "1" = city; null/empty deletes both
     * @param modelId model id (required)
     */
    @Override
    public void deleteDzzhCountAreaResultByTaskId(String taskId, String type, String modelId) {
        StringBuilder sql = new StringBuilder("delete from dzzh_count_area_result where task_id = ? and model_id = ?");
        List<Object> args = new ArrayList<>();
        args.add(taskId);
        args.add(modelId);
        if (PlatformObjectUtils.isNotEmpty(type)) {
            sql.append(" and type = ?");
            args.add(type);
        }
        jdbcTemplate.update(sql.toString(), args.toArray());
    }

    /**
     * Looks up the result-layer (dataset) name recorded for a model run.
     * Fix: the original concatenated parameters into the SQL (injection risk);
     * this version binds them. Any lookup error yields "" (best-effort kept).
     *
     * @param modelType model type label, e.g. "砂土液化-层次分析法"
     * @param taskId    task id
     * @param modelId   model id
     * @param intensity seismic intensity
     * @param pga       PGA value; only used for the sand-liquefaction model type
     * @return first matching layer name, or "" when none is found or the query fails
     */
    @Override
    public String getResultLayerNameParams(String modelType, String taskId, String modelId, String intensity, String pga) {
        try {
            StringBuilder sql = new StringBuilder("select layer_name from dzzh_count_area_result " +
                    "where model_type=? and task_id=? and model_id=? and dz_intensity=?");
            List<Object> args = new ArrayList<>();
            args.add(modelType);
            args.add(taskId);
            args.add(modelId);
            args.add(intensity);
            // Sand-liquefaction results are additionally keyed by PGA (stored in extend1).
            if ("砂土液化-层次分析法".equals(modelType)) {
                sql.append(" and extend1=?");
                args.add(pga);
            }
            List<String> layerNameList = jdbcTemplate.queryForList(sql.toString(), String.class, args.toArray());
            if (layerNameList != null && layerNameList.size() > 0) {
                return layerNameList.get(0);
            }
        } catch (Exception e) {
            // Deliberate best-effort: fall through to the empty default.
        }
        return "";
    }

    // Fetches every large-screen row (dp_flag = '1') from the statistics table.
    @Override
    public List<CountAreaResult> getCountAreaResult() {
        String sql = "select * from dzzh_count_area_result where dp_flag='1'";
        return jdbcTemplate.query(sql, new BeanPropertyRowMapper<>(CountAreaResult.class));
    }

    /**
     * Assembles the large-screen payload: display layer names, a count of rows per
     * hazard grade, and per-grade/per-region area sums with percentages.
     *
     * @param param screen filter (estimate type etc.)
     * @return map with keys "layerNames", "geologicalDisaster", "hazardousArea";
     *         empty when no matching tasks exist
     */
    @Override
    public HashMap<String, Object> largeScreen(LargeScreenParam param) {
        HashMap<String, Object> result = new HashMap<>();
        List<String> taskIds = eqLandslideRepository.getByEstimateType(param);
        if (PlatformObjectUtils.isNotEmpty(taskIds)) {
            // Quote + comma-join the ids so the repository can use them in an IN clause.
            List<HazardousAreaDto> hazardRows = eqLandslideRepository.getHazardousArea(spilt(String.join(",", taskIds)), param);

            // Distinct layer paths used by the map component.
            result.put("layerNames", hazardRows.stream()
                    .map(HazardousAreaDto::getLayerName)
                    .distinct()
                    .collect(Collectors.joining(",")));

            // City-level rows (type "1") display the leading segment of the task district.
            for (HazardousAreaDto row : hazardRows) {
                if (row.getType().equals("1")) {
                    row.setName(row.getTaskDistrict().split("-")[0]);
                }
            }

            // Nationwide overview: how many rows fall in each hazard grade.
            result.put("geologicalDisaster", hazardRows.stream()
                    .collect(Collectors.groupingBy(HazardousAreaDto::getGridvalue, Collectors.counting())));

            // Area summed per hazard grade, then per region name.
            Map<Integer, Map<String, Double>> areaByGradeAndRegion = hazardRows.stream()
                    .collect(Collectors.groupingBy(HazardousAreaDto::getGridvalue,
                            Collectors.groupingBy(HazardousAreaDto::getName, Collectors.summingDouble(HazardousAreaDto::getArea))));
            // Grand total used as the percentage denominator.
            double totalArea = hazardRows.stream()
                    .mapToDouble(HazardousAreaDto::getArea)
                    .sum();

            HashMap<Integer, Object> hazardousArea = new HashMap<>();
            for (Map.Entry<Integer, Map<String, Double>> gradeEntry : areaByGradeAndRegion.entrySet()) {
                ArrayList<HazardousAreaVo> regionRows = new ArrayList<>();
                for (Map.Entry<String, Double> regionEntry : gradeEntry.getValue().entrySet()) {
                    double areaSum = regionEntry.getValue();
                    HazardousAreaVo vo = new HazardousAreaVo();
                    vo.setArea(BigDecimal.valueOf(areaSum).setScale(4, RoundingMode.HALF_UP).doubleValue());
                    vo.setProvince(regionEntry.getKey());
                    vo.setPercent(BigDecimal.valueOf((areaSum / totalArea) * 100).setScale(4, RoundingMode.HALF_UP).doubleValue());
                    regionRows.add(vo);
                }
                hazardousArea.put(gradeEntry.getKey(), regionRows);
            }
            result.put("hazardousArea", hazardousArea);
        }
        return result;
    }

    /**
     * Wraps each comma-separated token in single quotes and re-joins with commas,
     * e.g. "a,b" -> "'a','b'". Empty tokens are skipped.
     * <p>
     * Fix: the original called {@code substring(length-1)} on the accumulated string and
     * threw StringIndexOutOfBoundsException when the input yielded no tokens (e.g. "");
     * this version simply returns "" in that case. (The original's null check on split
     * results was dead code: String.split never returns null elements.)
     *
     * @param str comma-separated values
     * @return quoted, comma-joined values; "" when there is nothing to quote
     */
    public static String spilt(String str) {
        StringBuilder sb = new StringBuilder();
        for (String token : str.split(",")) {
            if (!token.isEmpty()) {
                if (sb.length() > 0) {
                    sb.append(',');
                }
                sb.append('\'').append(token).append('\'');
            }
        }
        return sb.toString();
    }

    // Thin delegate: historical surface-rupture-zone records come straight from the repository.
    @Override
    public List<SurfaceRuptureZoneVo> getSurfaceRuptureZone() {
        return eqLandslideRepository.getSurfaceRuptureZone();
    }

    /**
     * Imports historical surface-rupture records from an uploaded Excel workbook.
     * Row 0 must match the expected header exactly; data rows start at row index 2.
     * Fixes: removed a leftover debug println, hoisted the per-row SimpleDateFormat
     * out of the loop, and the reader is now closed when done.
     *
     * @param file uploaded Excel file
     * @return success, or a failure response naming the first problem found
     * @throws Exception if the sheet cannot be read or a date cell cannot be parsed
     */
    @Override
    public RestResponse uploadSingleBvInformation(MultipartFile file) throws Exception {
        ExcelReader reader = ExcelUtil.getReader(file.getInputStream());
        try {
            List<Object> title = reader.readRow(0);
            String titleName = "[序号, 省份, 参考位置, 经度, 纬度, 震级, 地表破裂带长度(KM), 发震时间]";
            if (!titleName.equals(title.toString())) {
                return RestResponse.fail("导入失败，请检查表头是否改动");
            }
            List<List<Object>> read = reader.read(2, reader.getRowCount());
            if (read.size() == 0) {
                return RestResponse.fail("导入失败，表格中无数据");
            }
            // Validate every row before persisting anything.
            RestResponse rs = check(read.size(), read);
            if (rs.getCode() != 200) {
                return RestResponse.fail(rs.getMessage());
            }
            // One formatter for the whole import (the original allocated one per row).
            SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            for (List<Object> objects : read) {
                HistoricalSurfaceEntity entity = new HistoricalSurfaceEntity();
                entity.setId(UUIDGenerator.getUUID());
                entity.setProvince(objects.get(1).toString());
                entity.setRegion(objects.get(2).toString());
                entity.setLongitude(objects.get(3).toString());
                entity.setLatitude(objects.get(4).toString());
                entity.setMagnitude(objects.get(5).toString());
                entity.setLength(objects.get(6).toString());
                entity.setTime(dateFormat.parse(objects.get(7).toString()));
                eqLandslideRepository.save(entity);
            }
            return RestResponse.succeed("上传成功!");
        } finally {
            reader.close();
        }
    }

    /**
     * Validates that the required columns of every imported row are non-blank.
     * Fix: collapses seven copy-pasted if-blocks into a data-driven loop, and a null
     * cell no longer throws NPE (the original called toString() before isBlank).
     *
     * @param size number of rows to validate (callers pass data.size())
     * @param data Excel rows; column 0 is the sequence number and is not validated
     * @return success, or a failure naming the first blank cell (1-based row number)
     */
    private RestResponse check(int size, List<List<Object>> data) {
        // Field names for columns 1..7, in column order; messages match the originals.
        String[] fieldNames = {"省份", "参考位置", "经度", "纬度", "震级", "地表破裂带长度", "发震时间"};
        for (int i = 0; i < size; i++) {
            List<Object> objects = data.get(i);
            for (int col = 1; col <= fieldNames.length; col++) {
                Object cell = objects.get(col);
                if (cell == null || StringUtils.isBlank(cell.toString())) {
                    return RestResponse.fail("第" + (i + 1) + "行的{" + fieldNames[col - 1] + "}不能为空！");
                }
            }
        }
        return RestResponse.succeed();
    }


    /**
     * Builds hazard-area statistics at all three levels for export:
     * key "0" = by province, "1" = by city (with province resolved), "2" = by county
     * (with city and province resolved).
     * Fix: the inline parent-name resolution duplicated {@link #getSuperior} verbatim;
     * it now calls the helper.
     *
     * @return level -> statistics rows; empty map when no results exist
     */
    @Override
    public Map<String, List<StatisticsAreaVO>> getStatisticsAreaForExport(String intensity, String modelId, String taskId, String modelType) {
        // These three model types store results in the primary table; all others in "_other".
        String tableName = "dzzh_count_area_result_other";
        if ("地震滑坡-逻辑斯蒂模型".equals(modelType) || "软土震陷".equals(modelType) || "砂土液化-层次分析法".equals(modelType)) {
            tableName = "dzzh_count_area_result";
        }
        Map<String, List<StatisticsAreaVO>> map = new HashMap<>();
        List<CountAreaResult> countAreaResultList = eqLandslideRepository.getStatisticsArea(intensity, "", modelId, modelType, tableName);
        if (PlatformObjectUtils.isNotEmpty(countAreaResultList) && countAreaResultList.size() > 0) {
            // Province level: only rows of type "0", grouped by name then hazard grade.
            Map<String, Map<String, List<CountAreaResult>>> pMap = countAreaResultList.stream()
                    .filter(r -> "0".equals(r.getType()))
                    .collect(Collectors.groupingBy(CountAreaResult::getName, Collectors.groupingBy(CountAreaResult::getGridvalue)));
            map.put("0", build(pMap, "0"));
            // City level: only rows of type "1".
            Map<String, Map<String, List<CountAreaResult>>> cityMap = countAreaResultList.stream()
                    .filter(r -> "1".equals(r.getType()))
                    .collect(Collectors.groupingBy(CountAreaResult::getName, Collectors.groupingBy(CountAreaResult::getGridvalue)));
            List<StatisticsAreaVO> centerList = build(cityMap, "1");
            TaskEntity taskEntity = taskService.getTaskWithOutGeom(taskId);
            String taskDistrict = taskEntity.getTaskDistrict();
            Table<String, String, String> table = getTable();
            Map<String, String> areaMap = getMap();
            for (StatisticsAreaVO vo : centerList) {
                vo.setProvince(getSuperior(table, vo.getCity(), areaMap, taskDistrict));
            }
            map.put("1", centerList);
            // County level. NOTE(review): unlike the other levels this grouping is not
            // filtered by type — kept as-is; confirm that is intended.
            Map<String, Map<String, List<CountAreaResult>>> countyMap = countAreaResultList.stream()
                    .collect(Collectors.groupingBy(CountAreaResult::getName, Collectors.groupingBy(CountAreaResult::getGridvalue)));
            List<StatisticsAreaVO> countyList = build(countyMap, "2");
            for (StatisticsAreaVO vo : countyList) {
                // Order matters: province lookup uses the city set on the previous line.
                vo.setCity(getSuperior(table, vo.getCounty(), areaMap, taskDistrict));
                vo.setProvince(getSuperior(table, vo.getCity(), areaMap, taskDistrict));
            }
            map.put("2", countyList);
        }
        return map;
    }

    /**
     * Returns hazard-area statistics at one level: type "0" = province, "1" = city
     * (province resolved), "2" = county (city and province resolved).
     * Fix: the inline parent-name resolution for cities duplicated
     * {@link #getSuperior} verbatim; it now calls the helper, and the three identical
     * groupings are computed once.
     *
     * @return statistics rows, or null when there are no results or the type is unknown
     *         (null kept for caller compatibility)
     */
    @Override
    public List<StatisticsAreaVO> getStatisticsArea(String intensity, String type, String modelId, String taskId, String modelType) {
        // These three model types store results in the primary table; all others in "_other".
        String tableName = "dzzh_count_area_result_other";
        if ("地震滑坡-逻辑斯蒂模型".equals(modelType) || "软土震陷".equals(modelType) || "砂土液化-层次分析法".equals(modelType)) {
            tableName = "dzzh_count_area_result";
        }
        List<CountAreaResult> countAreaResultList = eqLandslideRepository.getStatisticsArea(intensity, type, modelId, modelType, tableName);
        if (PlatformObjectUtils.isNotEmpty(countAreaResultList) && countAreaResultList.size() > 0) {
            // Group once: region name -> hazard grade -> rows.
            Map<String, Map<String, List<CountAreaResult>>> grouped = countAreaResultList.stream()
                    .collect(Collectors.groupingBy(CountAreaResult::getName, Collectors.groupingBy(CountAreaResult::getGridvalue)));
            if ("0".equals(type)) {
                return build(grouped, "0");
            }
            if ("1".equals(type) || "2".equals(type)) {
                List<StatisticsAreaVO> resultList = build(grouped, type);
                TaskEntity taskEntity = taskService.getTaskWithOutGeom(taskId);
                String taskDistrict = taskEntity.getTaskDistrict();
                Table<String, String, String> table = getTable();
                Map<String, String> areaMap = getMap();
                for (StatisticsAreaVO vo : resultList) {
                    if ("1".equals(type)) {
                        vo.setProvince(getSuperior(table, vo.getCity(), areaMap, taskDistrict));
                    } else {
                        // Order matters: province lookup uses the city set on the previous line.
                        vo.setCity(getSuperior(table, vo.getCounty(), areaMap, taskDistrict));
                        vo.setProvince(getSuperior(table, vo.getCity(), areaMap, taskDistrict));
                    }
                }
                return resultList;
            }
        }
        return null;
    }

    /**
     * Resolves the parent-region name for areaName. The area table may contain
     * several regions with the same name; in that case the candidate whose name
     * appears in taskDistrict wins.
     *
     * @return the parent name, or "" when none can be determined
     */
    private String getSuperior(Table<String, String, String> table, String areaName, Map<String, String> areaMap, String taskDistrict) {
        List<String> candidates = new ArrayList<>();
        for (String parentId : table.row(areaName).values()) {
            candidates.add(areaMap.get(parentId));
        }
        if (candidates.size() == 1) {
            return candidates.get(0);
        }
        if (candidates.size() > 1) {
            // Name clash across different parents: pick the one in the task's district.
            for (String candidate : candidates) {
                if (taskDistrict.contains(candidate)) {
                    return candidate;
                }
            }
        }
        return "";
    }
    /**
     * Flattens grouped statistics into one VO per region.
     * type "0": keys are provinces; "1": cities; "2": counties.
     * For the soft-soil model the inner key is a subsidence flag (0 = none);
     * for everything else it is a hazard grade 1 (extremely high) .. 5 (extremely low).
     */
    private List<StatisticsAreaVO> build(Map<String, Map<String, List<CountAreaResult>>> areaMap, String type) {
        List<StatisticsAreaVO> rows = new ArrayList<>();
        areaMap.forEach((regionName, byGrade) -> {
            StatisticsAreaVO vo = new StatisticsAreaVO();
            switch (type) {
                case "0": vo.setProvince(regionName); break;
                case "1": vo.setCity(regionName); break;
                case "2": vo.setCounty(regionName); break;
                default: break;
            }
            byGrade.forEach((grade, results) -> {
                CountAreaResult first = results.get(0);
                if ("软土震陷".equals(first.getModelType())) {
                    if ("0".equals(grade)) {
                        // No subsidence.
                        vo.setBzxArea(first.getArea());
                        vo.setBzxRatio(saveTwoDecimal(first.getPercent()));
                    } else {
                        // Subsidence.
                        vo.setZxArea(first.getArea());
                        vo.setZxRatio(saveTwoDecimal(first.getPercent()));
                    }
                } else {
                    switch (grade) {
                        case "1": // extremely high hazard
                            vo.setEhArea(first.getArea());
                            vo.setEhRatio(saveTwoDecimal(first.getPercent()));
                            break;
                        case "2": // high hazard
                            vo.setHArea(first.getArea());
                            vo.setHRatio(saveTwoDecimal(first.getPercent()));
                            break;
                        case "3": // medium hazard
                            vo.setMArea(first.getArea());
                            vo.setMRatio(saveTwoDecimal(first.getPercent()));
                            break;
                        case "4": // low hazard
                            vo.setLArea(first.getArea());
                            vo.setLRatio(saveTwoDecimal(first.getPercent()));
                            break;
                        case "5": // extremely low hazard
                            vo.setElArea(first.getArea());
                            vo.setElRatio(saveTwoDecimal(first.getPercent()));
                            break;
                        default:
                            break;
                    }
                }
            });
            rows.add(vo);
        });
        return rows;
    }
    /**
     * Formats a percentage to two decimal places (HALF_DOWN rounding) with a "%" suffix.
     */
    public String saveTwoDecimal(BigDecimal percent) {
        BigDecimal scaled = percent.setScale(2, RoundingMode.HALF_DOWN);
        return scaled.toString() + "%";
    }

    /**
     * Builds a Guava Table of administrative areas: row key = area name,
     * column key = area uuid, value = parentId. The list is read from the Redis
     * "areaList" cache; on a cache miss it is fetched from the remote
     * division-code service and written back.
     *
     * NOTE(review): the cache key read here is "areaList" but the refreshed list
     * is written under "areaCodeList" (and via a different template instance) —
     * confirm this asymmetry is intentional.
     */
    public Table<String,String,String> getTable(){
        Gson gson = CreateGson.createGson();
        String jsonStr = redisTemplate1.opsForValue().get(FxfzConstants.CACHE_SYS_KEY  + "areaList");
        // The cast implies the cached value is a double-encoded JSON string; this
        // parse unwraps the inner string before Gson deserializes it. TODO confirm.
        jsonStr =(String) JSONObject.parse(jsonStr);
        List sysAreaList = gson.fromJson(jsonStr, List.class);
        if(sysAreaList==null || sysAreaList.size()==0){
            // Cache miss: pull the full division list from the remote service.
            String url = BuildUrl.buildUrl() + "/sorg_dzj/getAllDivisionCode";
            Map<String, Object> paramMap = new HashMap<>(1);
            ResponseEntity<String> responseEntity = restTemplateUtil.sendHttpRequest(url, paramMap);
            sysAreaList = CreateGson.createGson().fromJson(responseEntity.getBody(), List.class);
            redisTemplate.opsForValue().set(FxfzConstants.CACHE_SYS_KEY +  "areaCodeList",CreateGson.createGson().toJson(sysAreaList));
        }
        Table<String,String,String> map = HashBasedTable.create();
        for (Object obj : sysAreaList) {
            LinkedTreeMap treeMap = (LinkedTreeMap) obj;
            // assumes every entry carries non-null "name", "uuid" and "parentId" — NPE otherwise; TODO confirm
            map.put(treeMap.get("name").toString(),treeMap.get("uuid").toString(),treeMap.get("parentId").toString());
        }
        return map;
    }

    /**
     * Builds a uuid -> area-name lookup from the cached "areaList" JSON and mirrors
     * it into the "areaMap" cache entry when non-empty.
     * Fix: the original threw NullPointerException when the cache entry was missing
     * (gson.fromJson(null, ...) returns null, then .size() NPE'd); an empty map is
     * returned instead.
     *
     * @return uuid -> name map; empty when the area cache is not populated
     */
    public Map<String, String> getMap() {
        Gson gson = CreateGson.createGson();
        String jsonStr = redisTemplate1.opsForValue().get(FxfzConstants.CACHE_SYS_KEY + "areaList");
        // The cached value appears to be a double-encoded JSON string; unwrap it first.
        jsonStr = (String) JSONObject.parse(jsonStr);
        List sysAreaList = gson.fromJson(jsonStr, List.class);
        Map<String, String> tempMap = new HashMap<>();
        if (sysAreaList == null) {
            return tempMap;
        }
        for (Object obj : sysAreaList) {
            LinkedTreeMap entry = (LinkedTreeMap) obj;
            tempMap.put(entry.get("uuid").toString(), entry.get("name").toString());
        }
        if (tempMap.size() > 0) {
            redisTemplate.opsForValue().set(FxfzConstants.CACHE_SYS_KEY + "areaMap", CreateGson.createGson().toJson(tempMap));
        }
        return tempMap;
    }

    /**
     * Parses an uploaded shapefile set into lithology merge-group records and
     * batch-saves them under one version code.
     * Fix: the original called list.get(0) even when the parsed list was empty,
     * throwing IndexOutOfBoundsException; that case now returns a clear failure.
     *
     * @param fileId comma-joined attachment ids of the shp/cpg/dbf/prj/shx parts
     * @param taskId owning task id
     * @return the version code of the saved batch, or a failure response
     * @throws Exception on parse/IO errors (transaction rolls back)
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public RestResponse parseShp(String fileId, String taskId) throws Exception {
        if (!shpFileAllHasCheck(fileId)) {
            return RestResponse.fail("shp,cpg,dbf,prj,shx未全部上传或者正在上传中，请上传完成后再继续!");
        }
        Map<String, File> fileMap = this.uniformFileName1(fileId);
        try {
            ParsingResult shpData = getShpData(fileMap);
            if (!shpData.getMessage().equals("ok")) {
                return RestResponse.fail(shpData.getMessage());
            }
            List<LithologyMergeResult> list = getList(shpData.getShpList(), taskId);
            if (list == null || list.isEmpty()) {
                return RestResponse.fail("导入失败，shp文件中无数据");
            }
            eqLandslideRepository.mergeGroupBatchSave(list);
            return RestResponse.succeed(list.get(0).getLithologyVersionCode());
        } finally {
            // Always remove the temporary copies extracted for parsing.
            for (File file : fileMap.values()) {
                FileUtil.del(file);
            }
        }
    }

    /**
     * Maps parsed shp rows to LithologyMergeResult entities. All entities from one
     * upload share a single lithologyVersionCode so the batch can later be selected
     * as a version. Column order per row: 0 = geometry, 1 = id ("编号"),
     * 2 = merge-group name ("工程地质归并结果").
     */
    private List<LithologyMergeResult> getList(List<List<Object>> shpList, String taskId) throws ShpFieldException {
        String userID = PlatformSessionContext.getUserID(); // current login user
        String versionCode = UUIDGenerator.getUUID(); // one version code per upload
        List<LithologyMergeResult> results = new ArrayList<>();
        for (List<Object> row : shpList) {
            results.add(LithologyMergeResult
                    .builder()
                    .lithologyVersionCode(versionCode)
                    .createUser(userID)
                    .taskId(taskId)
                    .resultStatus("2")
                    .operateType("2")
                    .flag("2")
                    .dlmrId(UUIDGenerator.getUUID())
                    .createTime(PlatformDateUtils.getCurrentTimestamp())
                    .delFlag(YNEnum.N.toString())
                    .geom(ShpAnalyticUtil.checkGeomTypeOf(row.get(0).toString(), GeomTypeEnum.MULTIPOLYGON))
                    .lithologyId(ShpAnalyticUtil.getStringVal(row.get(1), "编号", true))
                    .lithologyGroupName(ShpAnalyticUtil.getStringVal(row.get(2), "工程地质归并结果", true))
                    .build());
        }
        return results;
    }
    /**
     * Extracts the paths of the shapefile parts and delegates parsing to ShpAnalyticUtil.
     * Missing parts are passed as empty strings.
     *
     * @param fileMap suffix (".prj"/".cpg"/".shp") -> temp file
     * @return the parsed shp data
     */
    public ParsingResult getShpData(Map<String, File> fileMap) throws Exception {
        File prjFile = fileMap.get(".prj"); // used to validate the 2000 coordinate system
        File cpgFile = fileMap.get(".cpg"); // supplies the character encoding, e.g. utf-8
        File shpFile = fileMap.get(".shp"); // holds the feature data
        String prj = prjFile == null ? StringUtils.EMPTY : prjFile.getPath();
        String cpg = cpgFile == null ? StringUtils.EMPTY : cpgFile.getPath();
        String shp = shpFile == null ? StringUtils.EMPTY : shpFile.getPath();
        return ShpAnalyticUtil.analytic(prj, cpg, shp);
    }
    /**
     * Copies each attachment into the classpath cache directory under a common
     * timestamp-based base name, keyed by its original file suffix.
     *
     * @param fileIds attachment ids joined with ","
     * @return suffix (e.g. ".shp") -> extracted temp file
     */
    public Map<String, File> uniformFileName1(String fileIds) throws FileNotFoundException {
        String baseName = String.valueOf(System.currentTimeMillis());
        String cacheDir = ResourceUtils.getURL("classpath:").getPath() + File.separator + "cache_file" + File.separator;
        Map<String, File> fileMap = new HashMap<>();
        for (String attachId : fileIds.split(",")) {
            AttachmentInfoEntity attach = attachmentInfoService.getAttach(attachId);
            String suffix = FileUtil.getSuffix(attach.getAttachName());
            File targetFile = new File(cacheDir + baseName + "." + suffix);
            FileUtil.copy(new File(fileUrl + File.separator + attach.getAttachPath()), targetFile, true);
            fileMap.put("." + suffix, targetFile);
        }
        return fileMap;
    }

    /**
     * Verifies that the attachment set contains all five shapefile parts
     * (shp, prj, dbf, cpg, shx), matched by file suffix.
     *
     * @param fileIds attachment ids joined with ","
     * @return true only when every required part is present
     */
    public boolean shpFileAllHasCheck(String fileIds) {
        Map<String, Boolean> found = new HashMap<>();
        found.put("shp", false);
        found.put("prj", false);
        found.put("dbf", false);
        found.put("cpg", false);
        found.put("shx", false);
        for (String id : fileIds.split(",")) {
            AttachmentInfoEntity attach = attachmentInfoService.getAttach(id);
            String suffix = FileUtil.getSuffix(attach.getAttachName());
            // Flip only suffixes we track; anything else is ignored.
            if (found.containsKey(suffix)) {
                found.put(suffix, true);
            }
        }
        return !found.containsValue(false);
    }
}
