package com.bblocks.common.anti_reptile.data;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.bblocks.common.anti_reptile.config.AntiReptileProperties;
import com.bblocks.common.anti_reptile.constant.AntiReptileConsts;
import com.bblocks.common.anti_reptile.data.bean.EsDslProperties;
import com.bblocks.common.anti_reptile.data.config.EsStatProperties;
import com.bblocks.common.anti_reptile.module.IpUserNumVo;
import com.bblocks.common.anti_reptile.module.RuleStatVO;
import com.bblocks.common.anti_reptile.rule.AbstractRuleData;
import com.bblocks.common.anti_reptile.rule.RuleData;
import com.bblocks.common.anti_reptile.util.ContentHolderUtil;
import lombok.Builder;
import lombok.Getter;
import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;
import org.apache.http.util.EntityUtils;
import org.apache.lucene.util.NamedThreadFactory;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestHighLevelClient;
import org.redisson.api.RBucket;
import org.redisson.api.RLock;
import org.redisson.api.RedissonClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.context.annotation.Lazy;
import org.springframework.dao.EmptyResultDataAccessException;
import org.springframework.jdbc.core.BeanPropertyRowMapper;
import org.springframework.jdbc.core.JdbcTemplate;


import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import javax.annotation.Resource;
import java.io.IOException;
import java.time.Duration;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;
import java.util.*;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

/**
 * <p>Project: bb-anti-reptile - PlumelogDataStat</p>
 * <p>描述：统计各种请求数据
 * 1、用户数据：用户X接口日最大qps（24小时内最高的qps），然后日中位数/平均值
 *      例如:x1接口user1： 20--即非登录最大qps20，(user_per_qps=20)
 *            接口user2： 30--即非登录最大qps30，(user_per_qps=30)
 *            接口user3： 40--即非登录最大qps40，(user_per_qps=40)
 *      然后取20,30,40 中位数=30（avg，max都为此值）
 * 2、ip数据：ip非登录X接口最大qps（24小时内最高的qps）/本IP用户数，然后全部IP x接口日中位数/平均值（登录用户必须 >= 1，不足1的按1计算）
 *     例如:x1接口ip1： 25/2=12.5--即非登录最大qps25，总共有2个用户，即为：12.5次/人/ip(ip_per_qps=12.5)
 *           接口ip2： 30/3=12.5--即非登录最大qps30，总共有3个用户，即为：10次/人/ip(ip_per_qps=10)
 *           接口ip3： 30/0=30--即非登录最大qps30，总共有0个用户-按1计算，即为：30次/人/ip(ip_per_qps=30)
 *     然后取30,12.5,10 中位数=12.5（avg，max都为此值）
 *
 * 3、以上只是单日，7天数据使用再次中位数/平均值，计算得出ip/用户的接口：均值访问，最大qps
 * 4、qps可以按5秒计算，最终转化成1秒qps。
 * 5、以上统计可以优化，以计算出，ip/user avg，和max后续将以此值来控制入口的流量
 *
 * 以上中位数/平均值 ,目前均选择平均值吧（sql中位数难算）
 * 6、目前完成数据分析， 不过会比较慢，需要流式计算，后期集成
 * </p>
 *
 *Plumelog数据结构：
 * plume_log_run_20240325
 * URL（访问url），USER_ID(用户ID)，appName（应用名称-为了提取user_id,本次数据统计不从gw提取），TOKEN，IP(ip)，COST(耗时)，REQ_FROM(来源，应用调用来源)，dtTime（date 时间）
 *
 *
 * @Author Zhao [125043150@qq.com]
 * @Date 2024/3/25 11:47
 * @Version 1.0
 * @since 8
 */
@ConditionalOnClass({ RedissonClient.class, RestHighLevelClient.class})
@Slf4j
//@Component
public class PlumelogDataStat {
    /**
     * es分析天，yyyyMMdd
     */
    public static final String ES_STAT_DAY_KEY = "ES_STAT_DAY";
    @Resource
    @Qualifier(AntiReptileConsts.ES_CLIENT_ANTI)
    private RestHighLevelClient client;

    @Autowired
    @Qualifier(AntiReptileConsts.JDBC_TEMP_ANTI)
    private JdbcTemplate jdbcTemplate;
    @Resource
    private AntiReptileProperties antiReptileProperties;
    @Lazy
    @Resource
    private RedissonClient redissonClient;
    @Resource
    private AbstractRuleData ruleData;
    @Resource
    private EsDslProperties esJsonProperties;
    @Resource
    private EsStatProperties esStatProperties;


    private ScheduledExecutorService scheduledExecutorService = Executors.newScheduledThreadPool(1
         ,new NamedThreadFactory("Plumelog-me-fetch-task"));


    /**
     * Start the daily analysis job (the job itself takes a distributed lock, so
     * multiple nodes may schedule it but only one executes per run).
     *
     * <p>First run is at the time computed by {@link #getRunStartTime}, then every
     * 24 hours. Disabled when {@code statLimitDay <= 0}.</p>
     */
    @PostConstruct
    public void init() {
        log.info("启动线程定时分析数据(来源：Plumelog-me(es))！,预计时间：{}",getRunStartTime(antiReptileProperties));

        if (esStatProperties.getStatLimitDay() > 0) {
            // Plain Runnable lambda: TimerTask belongs to java.util.Timer and adds
            // nothing when handed to a ScheduledExecutorService.
            // Catch Exception so one failed run does not cancel the periodic task.
            scheduledExecutorService.scheduleAtFixedRate(() -> {
                try {
                    allDatasAll();
                } catch (Exception e) {
                    log.error("分析数据流量控制数据(来源：Plumelog-me(es))出错：", e);
                }
            }, getRunStartTimeSeconds(antiReptileProperties), 24 * 60 * 60, TimeUnit.SECONDS);
        }
    }



    /**
     * Stop the scheduler on bean destruction. {@code shutdownNow} alone returns
     * immediately; wait briefly so an in-flight run can observe the interrupt
     * before the Spring context tears down the beans it depends on.
     */
    @PreDestroy
    public void destroy() {
        scheduledExecutorService.shutdownNow();
        try {
            scheduledExecutorService.awaitTermination(5, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // re-assert interrupt status
        }
    }

    /**
     * Analyse every pending day of data (optimise with threads/caching later if slow).
     *
     * <p>Runs under a Redisson distributed lock so only one node processes at a time.
     * The next day to process is kept in Redis under {@link #ES_STAT_DAY_KEY}
     * (format yyyyMMdd, defaults to yesterday); every day from that marker up to,
     * but excluding, today is processed in sequence, so missed runs catch up.</p>
     *
     * <p>NOTE(review): on a per-day failure the marker still advances, so a failed
     * day is logged and skipped, never retried — confirm this is intended.</p>
     * <p>NOTE(review): if the whole backlog takes longer than the one-hour lease,
     * {@code unlock()} in the finally block may throw because the lease already
     * expired — TODO confirm lease handling.</p>
     */
    public void allDatasAll() {
        RLock lock = redissonClient.getLock(AntiReptileConsts.BB_ANTI_PRE + RuleData.SYNC_LOCK);
        boolean lockResult = false;
        try {
            lockResult = lock.tryLock(1, 60 * 60, TimeUnit.SECONDS);//wait up to 1s; hold the lease for one hour
            if (lockResult) {
                RBucket<String> bucket = redissonClient.getBucket(AntiReptileConsts.BB_ANTI_PRE + ES_STAT_DAY_KEY);
                String targetDay = bucket.get();
                if(targetDay == null)
                    targetDay = LocalDate.now().plusDays(-1).format(DateTimeFormatter.ofPattern("yyyyMMdd"));//default: yesterday

                LocalDate theDay = LocalDate.parse(targetDay,DateTimeFormatter.ofPattern("yyyyMMdd"));
                LocalDate now = LocalDate.now();

                while(theDay.isBefore(now)){//only days strictly before today
                    try {
                        allDatas(theDay);
                    }catch (Exception e){
                        log.error("分析日：{} 数据异常：",theDay,e);
                    }
                    theDay = theDay.plusDays(1);
                    //persist the progress marker (next day to process)
                    bucket.set(theDay.format(DateTimeFormatter.ofPattern("yyyyMMdd")));
                }
            }else
                log.info("未抢到锁，本次不予执行");
        }catch (Exception e){
            log.error("同步数据异常：",e);
        }finally {
            if(lockResult)
                lock.unlock();//release only if we actually acquired it
        }
        //finally push yesterday's results to the rule store, lock-free by design
        ruleData.allDatasSync(LocalDate.now().plusDays(-1));
    }

    /**
     * Run every per-day analysis step for one day, in order
     * (ip user counts, ip qps, user qps, platform qps), then try to build the
     * final aggregated rules. Streaming computation is planned for later.
     *
     * @param theDay the day to analyse, never null
     * @throws Exception propagated from the individual steps
     */
    private void allDatas(@NonNull LocalDate theDay) throws Exception{
        log.info("分析数据(来源：Plumelog-me(es))！,时间：{}",theDay);

        // 1. per-ip distinct user counts (and the rolling 7-day averages)
        int total = this.ipDayUsers(theDay, this.esJsonProperties.getIpDayUsers(), 7);
        log.info("日：{} 共提取到最终ip用户数:{}",theDay, total);

        // 2. per-ip (not-logged-in) access qps
        total = ipVisitorDayData(theDay,
                this.esJsonProperties.getIpNologinAccess(),
                this.esJsonProperties.getIpNologinAccessQps());
        log.info("日：{} ip访问qps:{}",theDay, total);

        // 3. per-user access qps
        total = userVisitorDayData(theDay,
                this.esJsonProperties.getUserAccess(),
                this.esJsonProperties.getUserAccessQps());
        log.info("日：{} 用户访问qps数据:{}",theDay, total);

        // 4. platform-wide per-url access qps
        total = plateVisitorDayData(theDay,
                this.esJsonProperties.getPlateAccess(),
                this.esJsonProperties.getPlateAccessQps());
        log.info("日：{} 平台访问qps数据:{}",theDay, total);

        // once enough days have accumulated, (re)build the final result rows
        setEsStats(esStatProperties.getStatLimitDay());
    }


    /**
     * Build the final aggregated qps-rule rows once at least {@code limitDays}
     * days of per-day statistics exist.
     *
     * <p>Looks up the earliest per-day row; if it is {@code limitDays} or more days
     * old, deletes the previous result rows and re-inserts them from the last
     * {@code limitDays} days of data. Otherwise logs a warning and does nothing.</p>
     *
     * @param limitDays minimum number of accumulated days required
     */
    private void setEsStats(int limitDays){
        LocalDate now = LocalDate.now();
        RuleStatVO ruleStatVO = null;
        try {
            ruleStatVO = jdbcTemplate.queryForObject(AntiEsStatConsts.ACCESS_SUM_FISRT_SQL,new BeanPropertyRowMapper<>(RuleStatVO.class));
        } catch (EmptyResultDataAccessException ignored) {
            // no per-day rows yet — the null check below handles this case
        }
        if (ruleStatVO != null && ruleStatVO.getDay()!= null
                 && ChronoUnit.DAYS.between(ruleStatVO.getDay(), now) >= limitDays){
            log.info("存在：{} 天以上数据，可以最新形成结果！",limitDays);

            int total = jdbcTemplate.update(AntiEsStatConsts.DELETE_ACCESS_SUM_SQL);
            log.info("清理url访问qps规则：{}",total);

            // reuse 'now' so the cutoff matches the comparison above even if
            // midnight passes between the two statements
            total = jdbcTemplate.update(AntiEsStatConsts.REINSERT_ACCESS_SUM_SQL, now.minusDays(limitDays));
            log.info("生成url访问qps规则：{}",total);
        }else{
            log.warn("条件未达成,不能生成url访问qps规则！");
        }
    }

    /**
     * Aggregate per-user access qps for one day and persist one averaged row per URL.
     *
     * <p>Pages through ES buckets grouped by {@code url&&&userId}; for each pair it
     * queries the busiest 5-second window, converts it to a per-second qps, and
     * averages those values per URL before writing the row.</p>
     *
     * <p>NOTE(review): assumes the ES buckets are ordered by key so all users of the
     * same URL are contiguous — confirm against the DSL template.</p>
     *
     * @param theDay     the day to analyse (index plume_log_run_yyyyMMdd)
     * @param dslJson    DSL template for the url/user grouping query
     * @param dslQpsJson DSL template for the max-qps query of one url/user pair
     * @return the ES bucket total (>= 0) on success, -1 on failure
     * @throws IOException on ES transport errors
     */
    public int userVisitorDayData(LocalDate theDay, String dslJson,String dslQpsJson) throws IOException {
        log.info("分析统计日：{} 用户访问qps！",theDay);
        String index = "plume_log_run_"+theDay.format(DateTimeFormatter.ofPattern("yyyyMMdd"));

        int startIndex = 0;
        int total = 0;
        Map<String, Object> params = new HashMap<>();

        // running accumulator for the URL currently being grouped
        String lastUrl = null;
        int lastTotalNum = 0;
        double lastTotalValue = 0d;
        while (true) {
            params.put("from", startIndex);
            ESAggData ret = esPageData(dslJson, params, index, "group_urluserid");
            if (ret.getStatusCode() != 200) {
                log.error("统计url用户访问qps异常，返回码：{}", ret.getStatusCode());
                return -1;
            }
            total = ret.getTotal();
            JSONArray buckets = ret.getData();
            if (buckets.isEmpty()) {
                break;
            }
            for (int i = 0; i < buckets.size(); i++) {
                String key = buckets.getJSONObject(i).getString("key");
                String[] keyArr = key.split(esStatProperties.getSpitterStr());
                if (keyArr.length != 2 || keyArr[0].isEmpty() || keyArr[1].isEmpty()) {
                    continue; // malformed composite key — skip
                }
                String url = keyArr[0];
                String userId = keyArr[1];

                // BUGFIX: flush the PREVIOUS url before accumulating the current
                // entry. The old code accumulated first and then saved under the
                // new 'url', so every average contained one foreign sample and
                // was stored under the wrong URL.
                if (lastUrl != null && !lastUrl.equals(url) && lastTotalNum > 0) {
                    double avgQps = lastTotalValue / lastTotalNum;
                    jdbcTemplate.update(AntiEsStatConsts.SAVE_USER_ACCESS_SQL, lastUrl, theDay, avgQps, avgQps);
                    lastTotalNum = 0;
                    lastTotalValue = 0d;
                    total++;
                }

                params.put("url", url);
                params.put("user_id", userId);

                ESAggData retQps = esPageData(dslQpsJson, params, index, "group_seconds");
                // guard against an empty bucket list (was an unchecked getJSONObject(0))
                if (retQps.getStatusCode() == 200 && !retQps.getData().isEmpty()) {
                    // busiest 5-second window -> per-second qps
                    Integer accessMaxNum = retQps.getData().getJSONObject(0).getInteger("doc_count");
                    lastTotalNum++;
                    lastTotalValue += accessMaxNum / 5.0;
                }
                lastUrl = url;
            }
            startIndex += buckets.size();
        }
        // flush the trailing URL group
        if (lastTotalNum > 0) {
            double avgQps = lastTotalValue / lastTotalNum;
            jdbcTemplate.update(AntiEsStatConsts.SAVE_USER_ACCESS_SQL, lastUrl, theDay, avgQps, avgQps);
            total++;
        }
        return total;
    }

    /**
     * Aggregate per-ip (not-logged-in) access qps for one day and persist one
     * averaged row per URL. Four steps because no single ES query covers it:
     * <ol>
     *   <li>group by {@code URL&&&IP};</li>
     *   <li>per url/ip, query the busiest 5-second window (max qps);</li>
     *   <li>average the per-ip values per URL locally (qps divided by the ip's
     *       user count, see {@link #ipUserNum});</li>
     *   <li>write one row per URL.</li>
     * </ol>
     *
     * <p>NOTE(review): assumes the ES buckets are ordered by key so all ips of the
     * same URL are contiguous — confirm against the DSL template.</p>
     *
     * @param theDay     the day to analyse (index plume_log_run_yyyyMMdd)
     * @param dslJson    DSL template for the paged IP&&&URL grouping query
     * @param dslQpsJson DSL template for the per-ip max-qps query
     * @return the ES bucket total (>= 0) on success, -1 on failure
     * @throws IOException on ES transport errors
     */
    public int ipVisitorDayData(LocalDate theDay, String dslJson,String dslQpsJson) throws IOException {
        log.info("统计日：{} ip访问qps！",theDay);
        String index = "plume_log_run_"+theDay.format(DateTimeFormatter.ofPattern("yyyyMMdd"));

        int startIndex = 0;
        int total = 0;
        Map<String, Object> params = new HashMap<>();

        // running accumulator for the URL currently being grouped
        String lastUrl = null;
        int lastTotalNum = 0;
        double lastTotalValue = 0d;
        while (true) {
            params.put("from", startIndex);
            ESAggData ret = esPageData(dslJson, params, index, "group_urlip");
            if (ret.getStatusCode() != 200) {
                log.error("统计ip访问qps异常，返回码：{}", ret.getStatusCode());
                return -1;
            }
            total = ret.getTotal();
            JSONArray buckets = ret.getData();
            if (buckets.isEmpty()) {
                break;
            }
            for (int i = 0; i < buckets.size(); i++) {
                String key = buckets.getJSONObject(i).getString("key");
                String[] keyArr = key.split(esStatProperties.getSpitterStr());
                if (keyArr.length != 2 || keyArr[0].isEmpty() || keyArr[1].isEmpty()) {
                    continue; // malformed composite key — skip
                }
                String url = keyArr[0];
                String ip = keyArr[1];

                // BUGFIX: flush the PREVIOUS url before accumulating the current
                // entry. The old code accumulated first and then saved under the
                // new 'url', so every average contained one foreign sample and
                // was stored under the wrong URL.
                if (lastUrl != null && !lastUrl.equals(url) && lastTotalNum > 0) {
                    double avgQps = lastTotalValue / lastTotalNum;
                    jdbcTemplate.update(AntiEsStatConsts.SAVE_IP_ACCESS_SQL, lastUrl, theDay, avgQps, avgQps);
                    lastTotalNum = 0;
                    lastTotalValue = 0d;
                    total++;
                }

                params.put("url", url);
                params.put("ip", ip);

                ESAggData retQps = esPageData(dslQpsJson, params, index, "group_seconds");
                // guard against an empty bucket list (was an unchecked getJSONObject(0))
                if (retQps.getStatusCode() == 200 && !retQps.getData().isEmpty()) {
                    // busiest 5-second window -> per-second qps, normalised by user count
                    Integer accessMaxNum = retQps.getData().getJSONObject(0).getInteger("doc_count");
                    double qps = accessMaxNum / 5.0;
                    lastTotalNum++;
                    lastTotalValue += qps / ipUserNum(ip); // ipUserNum is always >= 1
                }
                lastUrl = url;
            }
            startIndex += buckets.size();
        }
        // flush the trailing URL group
        if (lastTotalNum > 0) {
            double avgQps = lastTotalValue / lastTotalNum;
            jdbcTemplate.update(AntiEsStatConsts.SAVE_IP_ACCESS_SQL, lastUrl, theDay, avgQps, avgQps);
            total++;
        }
        return total;
    }

    /**
     * Aggregate platform-wide access qps for one day and persist one averaged row
     * per URL. Four steps because no single ES query covers it:
     * <ol>
     *   <li>group by URL;</li>
     *   <li>per url, query the busiest 5-second window (max qps);</li>
     *   <li>average the per-url values locally;</li>
     *   <li>write one row per URL.</li>
     * </ol>
     *
     * @param theDay     the day to analyse (index plume_log_run_yyyyMMdd)
     * @param dslJson    DSL template for the paged url grouping query
     * @param dslQpsJson DSL template for the per-url max-qps query
     * @return the ES bucket total (>= 0) on success, -1 on failure
     * @throws IOException on ES transport errors
     */
    public int plateVisitorDayData(LocalDate theDay, String dslJson,String dslQpsJson) throws IOException {
        log.info("统计日：{} 平台访问qps！",theDay);
        String index = "plume_log_run_"+theDay.format(DateTimeFormatter.ofPattern("yyyyMMdd"));

        int startIndex = 0;
        int total = 0;
        Map<String, Object> params = new HashMap<>();

        // running accumulator for the URL currently being grouped
        String lastUrl = null;
        int lastTotalNum = 0;
        double lastTotalValue = 0d;
        while (true) {
            params.put("from", startIndex);
            ESAggData ret = esPageData(dslJson, params, index, "group_url");
            if (ret.getStatusCode() != 200) {
                // BUGFIX: message said "统计ip" in the platform method
                log.error("统计平台访问qps异常，返回码：{}", ret.getStatusCode());
                return -1;
            }
            total = ret.getTotal();
            JSONArray buckets = ret.getData();
            if (buckets.isEmpty()) {
                break;
            }
            for (int i = 0; i < buckets.size(); i++) {
                String url = buckets.getJSONObject(i).getString("key");

                // BUGFIX: flush the PREVIOUS url before accumulating the current
                // entry. The old code accumulated first and then saved under the
                // new 'url', so every average contained one foreign sample and
                // was stored under the wrong URL.
                if (lastUrl != null && !lastUrl.equals(url) && lastTotalNum > 0) {
                    double avgQps = lastTotalValue / lastTotalNum;
                    jdbcTemplate.update(AntiEsStatConsts.SAVE_PLATE_ACCESS_SQL, lastUrl, theDay, avgQps, avgQps);
                    lastTotalNum = 0;
                    lastTotalValue = 0d;
                    total++;
                }

                params.put("url", url);

                ESAggData retQps = esPageData(dslQpsJson, params, index, "group_seconds");
                // guard against an empty bucket list (was an unchecked getJSONObject(0))
                if (retQps.getStatusCode() == 200 && !retQps.getData().isEmpty()) {
                    // busiest 5-second window -> per-second qps
                    Integer accessMaxNum = retQps.getData().getJSONObject(0).getInteger("doc_count");
                    lastTotalNum++;
                    lastTotalValue += accessMaxNum / 5.0;
                }
                lastUrl = url;
            }
            startIndex += buckets.size();
        }
        // flush the trailing URL group.
        // BUGFIX: this flush previously wrote with SAVE_IP_ACCESS_SQL, putting the
        // last platform row into the ip table; use SAVE_PLATE_ACCESS_SQL like the
        // in-loop flush.
        if (lastTotalNum > 0) {
            double avgQps = lastTotalValue / lastTotalNum;
            jdbcTemplate.update(AntiEsStatConsts.SAVE_PLATE_ACCESS_SQL, lastUrl, theDay, avgQps, avgQps);
            total++;
        }
        return total;
    }


    /**
     * Number of distinct users observed for an ip, always {@code >= 1}.
     *
     * <p>Cached in Redis for an hour because this is called once per ES bucket in
     * the qps loops and the underlying SQL is slow. Falls back to 1 when the DB
     * has no row (or a null count) for the ip.</p>
     *
     * @param ip the client ip, never null
     * @return cached or freshly computed user count ({@code >= 1})
     */
    private int ipUserNum(@NonNull String ip){
        RBucket<Integer> bucket = redissonClient.getBucket(AntiReptileConsts.BB_ANTI_PRE + "ip_user_num::" + ip);
        Integer cached = bucket.get();
        if (cached != null) {
            return cached;
        }
        IpUserNumVo vo = null;
        try {
            vo = jdbcTemplate.queryForObject(AntiEsStatConsts.IP_SUM_SQL, new BeanPropertyRowMapper<>(IpUserNumVo.class), ip);
        } catch (EmptyResultDataAccessException ignored) {
            // no row for this ip — fall through to the default of 1
        }
        Integer num = (vo != null && vo.getUserNum() != null) ? vo.getUserNum() : 1;
        bucket.set(num, 60, TimeUnit.MINUTES);
        return num;
    }

    /**
     * Count distinct users per ip for one day and persist the counts in batches,
     * then recompute the rolling n-day averages.
     *
     * <p>Pages through ES buckets grouped by {@code ip&&&userId}, counts users per
     * ip, and batch-saves once {@code batchNo} complete ips have accumulated.</p>
     *
     * <p>NOTE(review): assumes the ES buckets are ordered by key so all users of
     * one ip are contiguous — confirm against the DSL template.</p>
     *
     * @param theDay  the day to analyse (index plume_log_run_yyyyMMdd)
     * @param dslJson DSL template for the ip/user grouping query
     * @param lastDay window for the rolling average (null = skip that step)
     * @return {@code >= 0} on success (rows touched by the rolling-average rebuild,
     *         or the processed pair count when that step is skipped), -1 on failure
     * @throws IOException on ES transport errors
     */
    public int ipDayUsers(@NonNull LocalDate theDay,@NonNull String dslJson,Integer lastDay) throws IOException{
        log.info("统计日：{} ip用户数量！",theDay);
        String index = "plume_log_run_"+theDay.format(DateTimeFormatter.ofPattern("yyyyMMdd"));

        int startIndex = 0;
        int total = 0;
        Map<String, Object> params = new HashMap<>();

        Map<String,Integer> ipUserNumMap = new HashMap<>();
        String lastIp = null;
        int batchNo = this.antiReptileProperties.getDb().getBatchNo();
        while (true) {
            params.put("from", startIndex);
            ESAggData pageData = esPageData(dslJson, params, index, "group_ipuser");
            if (pageData.getStatusCode() != 200) {
                log.error("统计ip用户数异常，返回码：{}", pageData.getStatusCode());
                return -1;
            }
            JSONArray buckets = pageData.getData();
            if (buckets.isEmpty()) {
                break;
            }
            for (int i = 0; i < buckets.size(); i++) {
                String key = buckets.getJSONObject(i).getString("key");
                String[] keyArr = key.split(esStatProperties.getSpitterStr());
                if (keyArr.length == 2 && !keyArr[0].isEmpty() && !keyArr[1].isEmpty()) {
                    String ip = keyArr[0];
                    // BUGFIX: flush BEFORE inserting the new ip. The old code
                    // flushed after the put, so the in-progress ip was saved with
                    // a partial count and then counted again from zero, producing
                    // two rows for one ip.
                    if (lastIp != null && !lastIp.equals(ip) && ipUserNumMap.size() >= batchNo) {
                        batchSaveIpUsers(ipUserNumMap, theDay);
                        ipUserNumMap.clear();
                    }
                    ipUserNumMap.merge(ip, 1, Integer::sum);
                    total++; // BUGFIX: was never incremented, so the summary log always read 0
                    lastIp = ip;
                }
            }
            startIndex += buckets.size();
        }
        // flush whatever remains
        if (!ipUserNumMap.isEmpty()) {
            batchSaveIpUsers(ipUserNumMap, theDay);
        }
        log.info("日：{} 总计ip：{} 处理完成！",theDay,total);
        //2. recompute the final per-ip user counts (rolling average)
        int ret = resetIpUserNumFinal(lastDay);
        return ret >= 0 ? ret : total;
    }

    /**
     * Rebuild the final per-ip average user counts over the last {@code lastDay}
     * days: delete the previous result rows, then re-insert from the raw data.
     *
     * @param lastDay window in days; null means "skip", returning -1
     * @return rows inserted by the rebuild, or -1 when skipped
     */
    private int resetIpUserNumFinal(Integer lastDay){
        if (lastDay == null) {
            return -1;
        }
        int affected = jdbcTemplate.update(AntiEsStatConsts.DELETE_IP_SUM_SQL);
        log.info("清理最终ip日均用户数：{}",affected);

        affected = jdbcTemplate.update(AntiEsStatConsts.REINSERT_IP_SUM_SQL, LocalDate.now().minusDays(lastDay));
        log.info("重新统计最终ip日均用户数：{}",affected);
        return affected;
    }


    /**
     * Fetch one page of ES aggregation data via the low-level REST client.
     *
     * @param dslJson DSL template with placeholders
     * @param params  values substituted into the template
     * @param index   target index name
     * @param aggName name of the aggregation whose buckets are returned
     * @return status + buckets (+ overall count when the template provides one;
     *         -1 otherwise); on a non-200 status only the status code is set
     * @throws IOException on transport errors
     */
    private ESAggData esPageData(String dslJson,Map<String, Object> params,String index,String aggName) throws IOException{
        Request request = new Request("GET", "/" + index + "/_search");
        request.setJsonEntity(ContentHolderUtil.replacePlaceHolder(dslJson, params));

        Response response = client.getLowLevelClient().performRequest(request);
        int statusCode = response.getStatusLine().getStatusCode();
        if (statusCode != 200) {
            log.error("统计数据：{} 异常，返回码：{}", aggName, statusCode);
            return ESAggData.builder().statusCode(statusCode).build();
        }

        JSONObject body = JSON.parseObject(EntityUtils.toString(response.getEntity()));
        JSONObject aggregations = body.getJSONObject("aggregations");

        int total = -1; // some query templates carry no overall "count" aggregation
        JSONObject countAgg = aggregations.getJSONObject("count");
        if (countAgg != null) {
            total = countAgg.getInteger("value");
        }
        JSONArray buckets = aggregations.getJSONObject(aggName).getJSONArray("buckets");

        return ESAggData.builder().data(buckets).total(total).statusCode(statusCode).build();
    }

    /**
     * Batch-insert one row per (ip, userNum, day) entry. The map is NOT cleared
     * here — callers clear it after a successful flush.
     *
     * @param ipUserNumMap ip -> distinct user count
     * @param theDay       the day the counts belong to
     * @return number of batch entries executed
     */
    private int batchSaveIpUsers(Map<String,Integer> ipUserNumMap,LocalDate theDay) {
        // presize: one Object[] per map entry
        List<Object[]> batchArgs = new ArrayList<>(ipUserNumMap.size());
        for (Map.Entry<String, Integer> entry : ipUserNumMap.entrySet()) {
            batchArgs.add(new Object[]{entry.getKey(), entry.getValue(), theDay});
        }
        return jdbcTemplate.batchUpdate(AntiEsStatConsts.SAVE_IP_NUM_SQL, batchArgs).length;
    }


    /**
     * Next scheduled run: tomorrow at hour {@code syncTime - 1}
     * ({@code AntiReptileProperties.sync.syncTime}, default 2 — i.e. 01:00).
     *
     * @param antiReptileProperties source of the configured sync hour
     * @return tomorrow's date combined with the computed start hour
     */
    protected LocalDateTime getRunStartTime(AntiReptileProperties antiReptileProperties){
        // decomposed from a single dense expression for readability
        Integer syncTime = antiReptileProperties.getSync().getSyncTime();
        int startHour = (syncTime != null ? syncTime : 2) - 1;
        return LocalDateTime.of(LocalDate.now().plusDays(1), LocalTime.of(startHour, 0, 0));
    }

    /**
     * Seconds from now until the next scheduled run (see {@link #getRunStartTime}).
     *
     * @param antiReptileProperties source of the configured sync hour
     * @return whole seconds until the run time; positive because the target is tomorrow
     */
    protected long getRunStartTimeSeconds(AntiReptileProperties antiReptileProperties){
        LocalDateTime next = getRunStartTime(antiReptileProperties);
        // Duration.between(start, end): end minus start
        return Duration.between(LocalDateTime.now(), next).getSeconds();
    }



    /**
     * One page of an ES aggregation response: the HTTP status, the optional
     * overall bucket count, and the bucket array of the requested aggregation.
     * Built via the Lombok builder in {@code esPageData}.
     */
    @Builder
    @Getter
    public static class ESAggData{
        /**
         * HTTP status code of the ES response: 200 = success
         */
        private int statusCode;
        /**
         * overall bucket count from the "count" aggregation; -1 when the query
         * template carries no such aggregation, null when the request failed
         */
        private Integer total;
        /**
         * buckets of the requested aggregation; null when the request failed
         */
        private JSONArray data;
    }
}
