package com.jiesone.logger.distributecleaning.config;



import com.alibaba.fastjson.JSON;
import com.jiesone.logger.distributecleaning.entity.LoggersStatistic;
import com.jiesone.logger.distributecleaning.util.DateUtil;
import com.jiesone.logger.distributecleaning.util.ElasticSearchUtil;
import com.jiesone.logger.distributecleaning.util.SnowFlake;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.data.redis.core.TimeoutUtils;
import org.springframework.jdbc.core.BeanPropertyRowMapper;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import java.util.*;
/*
 *@ClassName SchedulerConfig
 *@Description Scheduled jobs that refresh log-cleaning configuration (sources, targets, parse rules, alarm and matching rules) from MySQL into Redis, and persist hourly per-level log statistics
 *@Author tangsixiang@163.com
 *@Date 2022/1/17 11:57
 *@Version 1.0
 */

@EnableScheduling
@Component
public class SchedulerConfig {

    private static final Logger logger = LoggerFactory.getLogger(SchedulerConfig.class);

    @Autowired
    private JdbcTemplate jdbcTemplate;
    @Autowired
    private SnowFlake snowFlake;
    @Autowired
    private StringRedisTemplate stringRedisTemplate;

    /**
     * Refreshes all log-collection configuration caches in Redis every two
     * minutes (at second 15): active log sources, save targets, custom parse
     * rules, alarm rules and field-matching rules, each reloaded from MySQL.
     *
     * NOTE(review): each Redis hash is replaced by delete-then-putAll, which is
     * not atomic — readers hitting Redis between the two calls see an empty
     * configuration. Confirm consumers tolerate this, or switch to writing a
     * new key and renaming.
     */
    @Scheduled(cron = "15 */2 * * * ?")
    public void changeDataRuleRefreshTask() {
        List<Map<String, Object>> targetSource = refreshSaveTargets();
        refreshAccessSources();
        refreshCustomParseRules(targetSource);
        refreshAlarmRules();
        refreshMatchingRules();
    }

    /**
     * Reloads active log sources (source_flag=1) into the accessSource Redis
     * hash, keyed by sourceCode with the whole row JSON-serialized as value.
     */
    private void refreshAccessSources() {
        logger.debug("---------- 刷新配置日志收集源配置-----------------");
        List<Map<String, Object>> dataSource = jdbcTemplate.queryForList(
                "SELECT source_code sourceCode,source_host sourceHost ,data_code dataCode , source_format sourceFormat, source_data  sourceData  ,save_level saveLevel ,source_flag sourceFlag FROM logger_source where  source_flag=1 ");
        Map<String, String> accessSourceMap = new HashMap<>();
        for (Map<String, Object> row : dataSource) {
            accessSourceMap.put(String.valueOf(row.get("sourceCode")), JSON.toJSONString(row));
            logger.debug(JSON.toJSONString(row));
        }
        // Build first, then swap: keeps the window with an empty hash as small
        // as possible.
        stringRedisTemplate.delete(CommonRediskey.accessSource);
        if (!accessSourceMap.isEmpty()) {
            // HMSET rejects an empty field map, so guard the putAll — otherwise
            // an empty result set would crash the job after wiping the cache.
            stringRedisTemplate.opsForHash().putAll(CommonRediskey.accessSource, accessSourceMap);
        }
        logger.debug("---------刷新有效日志源完-------------");
    }

    /**
     * Reloads active save targets (target_flag=1) joined with their sources
     * into the savaTarget Redis hash, keyed by "sourceCode:targetType".
     *
     * @return the raw target rows, reused by the custom parse-rule refresh
     */
    private List<Map<String, Object>> refreshSaveTargets() {
        List<Map<String, Object>> targetSource = jdbcTemplate.queryForList(
                "SELECT  t.`target_id` targetId,t.target_index targetIndex  ,t.`source_id` sourceId,s.`source_code` sourceCode, t.`target_data` targetData  ,t.`target_type` targetType,t.`target_flag` targetFlag FROM `logger_target`  t LEFT JOIN `logger_source`  s ON t.`source_id`=s.`source_id` where t.target_flag=1  order by  target_flag desc   ");
        Map<String, String> savaTargetMap = new HashMap<>();
        for (Map<String, Object> row : targetSource) {
            savaTargetMap.put(row.get("sourceCode") + ":" + row.get("targetType"), JSON.toJSONString(row));
            logger.debug(JSON.toJSONString(row));
        }
        stringRedisTemplate.delete(CommonRediskey.savaTarget);
        if (!savaTargetMap.isEmpty()) { // empty putAll would throw (HMSET with no fields)
            stringRedisTemplate.opsForHash().putAll(CommonRediskey.savaTarget, savaTargetMap);
        }
        logger.debug("---------刷新有效日志源完---更新清洗规则中-----------");
        return targetSource;
    }

    /**
     * For every target with targetType == 0 (custom business cleaning),
     * reloads its parse rules for both rule types (1 = smart split,
     * 0 = regex / custom split) into a per-source Redis set, embedding the
     * rule's field descriptions as a "dataKey=dataIndex;" string under
     * the "ruleKeyDesc" key of each serialized rule.
     */
    private void refreshCustomParseRules(List<Map<String, Object>> targetSource) {
        for (Map<String, Object> target : targetSource) {
            // Objects.equals guards a null targetType column; the original
            // map.get("targetType").equals(0) would NPE on it.
            if (!Objects.equals(target.get("targetType"), 0)) {
                continue;
            }
            for (int ruleType : new int[]{1, 0}) { // 1 = smart split, 0 = regex/custom split
                String codeRuleKey = String.format(CommonRediskey.dataRuleSet, target.get("sourceCode"), ruleType);
                stringRedisTemplate.delete(codeRuleKey);
                List<Map<String, Object>> sourceCodeRule = jdbcTemplate.queryForList(
                        "SELECT  r.rule_id ruleId,   r.source_code sourceCode,    r.rule_format ruleFormat,  r.rule_pretty rulePretty, r.char_index charIndex,   r.rule_order ruleOrder     FROM `logger_data_rule` r   WHERE    source_code=? AND  rule_type=? AND  rule_flag=1 ORDER BY rule_order   ",
                        target.get("sourceCode"), ruleType);
                if (sourceCodeRule.isEmpty()) {
                    continue;
                }
                String[] serializedRules = new String[sourceCodeRule.size()];
                int i = 0;
                for (Map<String, Object> rule : sourceCodeRule) {
                    attachRuleKeyDesc(rule);
                    serializedRules[i++] = JSON.toJSONString(rule);
                }
                // add() may return null inside a pipeline/transaction; keep the
                // boxed Long — the original unboxed to long, risking an NPE.
                Long added = stringRedisTemplate.opsForSet().add(codeRuleKey, serializedRules);
                logger.info("{}---------配置自定义成功-------------{}", codeRuleKey, added);
            }
        }
    }

    /**
     * Loads the field descriptions of one parse rule and, when any exist,
     * stores them on the rule row as "ruleKeyDesc" in
     * "dataKey=dataIndex;dataKey=dataIndex;..." form.
     */
    private void attachRuleKeyDesc(Map<String, Object> rule) {
        List<Map<String, Object>> ruleDesc = jdbcTemplate.queryForList(
                "SELECT d.data_index dataIndex ,data_key dataKey ,d.`rule_id` ruleId,d.data_alias dataAlias,d.data_end dataEnd   FROM `logger_rule_desc`   d LEFT JOIN   `logger_data_rule`  r ON  d.`rule_id`=r.`rule_id` WHERE  d.`rule_id`=? AND   r.`rule_flag`=1  ORDER BY desc_order ",
                rule.get("ruleId"));
        if (ruleDesc.isEmpty()) {
            return;
        }
        StringBuilder ruleKeyIndex = new StringBuilder();
        for (Map<String, Object> desc : ruleDesc) {
            ruleKeyIndex.append(desc.get("dataKey")).append("=").append(desc.get("dataIndex")).append(";");
        }
        // Store the joined string itself; the original stored a StringBuffer,
        // which fastjson serializes to the same string anyway.
        rule.put("ruleKeyDesc", ruleKeyIndex.toString());
    }

    /**
     * Reloads active alarm rules (alarm_flag=1) into the aleamRule Redis hash,
     * keyed by "source_code:alarm_type" with alarm_level as value.
     */
    private void refreshAlarmRules() {
        List<Map<String, Object>> alarmList = jdbcTemplate.queryForList(
                "SELECT * FROM `logger_alarm_rule` where alarm_flag=1  ORDER BY alarm_flag  DESC ");
        Map<String, String> alarmRuleMap = new HashMap<>();
        for (Map<String, Object> row : alarmList) {
            // String.valueOf tolerates a null alarm_level (original .toString() NPE'd)
            alarmRuleMap.put(row.get("source_code") + ":" + row.get("alarm_type"),
                    String.valueOf(row.get("alarm_level")));
        }
        stringRedisTemplate.delete(CommonRediskey.aleamRule);
        if (!alarmRuleMap.isEmpty()) { // empty putAll would throw (HMSET with no fields)
            stringRedisTemplate.opsForHash().putAll(CommonRediskey.aleamRule, alarmRuleMap);
        }
    }

    /**
     * Reloads active field-matching (cleaning) rules (maching_flag=1) into the
     * machingRule Redis hash: lower-cased toIndex ->
     * "fromIndex;fromField;toFiled;filed".
     */
    private void refreshMatchingRules() {
        List<Map<String, Object>> rules = jdbcTemplate.queryForList(
                "SELECT  `from_index_name` fromIndex ,`to_index_name` toIndex,`from_field` fromField,`to_filed` toFiled,`filed`   FROM `logger_maching_rule`  WHERE maching_flag=1 ");
        Map<String, String> machingRuleMap = new HashMap<>();
        for (Map<String, Object> row : rules) {
            String key = row.get("toIndex").toString().toLowerCase();
            String value = row.get("fromIndex").toString().toLowerCase() + ";" + row.get("fromField")
                    + ";" + row.get("toFiled") + ";" + row.get("filed");
            machingRuleMap.put(key, value);
        }
        stringRedisTemplate.delete(CommonRediskey.machingRule);
        if (!machingRuleMap.isEmpty()) { // empty putAll would throw (HMSET with no fields)
            stringRedisTemplate.opsForHash().putAll(CommonRediskey.machingRule, machingRuleMap);
        }
    }

    /**
     * Hourly job (minute 1): for each active target/source pair, reads the
     * previous hour's per-level log counts from a Redis hash and inserts one
     * statistics row per level into loger_statistic.
     */
    @Scheduled(cron = "0 1 */1 * * ?")
    public void updateLoggerStatistic() {
        String sql = "SELECT  t.`target_index` targetIndex,s.`source_code` sourceCode  FROM `logger_target` t, `logger_source` s  WHERE t.source_id=s.source_id  AND t.target_type=1  AND t.target_flag=1 AND  s.source_flag=1";
        // Loop-invariant; the original rebuilt this literal on every iteration.
        // NOTE(review): DATE_SUB(NOW(), INTERVAL -1 HOUR) sets event_time one
        // hour in the FUTURE while the hash key is the PREVIOUS hour — confirm
        // this sign is intentional.
        String insertSql = "INSERT INTO `service_manage`.`loger_statistic`(id,`source_code`,`target_index`,`log_level`,`log_count`,`event_time`,`creat_time`,`date_houre`,`date_day`)VALUE(?,?,?,?,?, DATE_SUB(NOW(),   INTERVAL -1 HOUR),NOW(),LEFT(DATE_ADD(NOW(), INTERVAL -1 HOUR),13),LEFT(DATE_ADD(NOW(), INTERVAL -1 HOUR),10))";
        // Previous hour's bucket, computed once so every row lands in the same
        // bucket even if the job straddles an hour boundary.
        String dataTime = DateUtil.getBeforeByHourTime(1);

        List<LoggersStatistic> statistics =
                jdbcTemplate.query(sql, new BeanPropertyRowMapper<>(LoggersStatistic.class));
        for (LoggersStatistic stat : statistics) {
            String hashKey = String.format("sourceCode:[%s]:dateDay:[%s]", stat.getSourceCode(), dataTime);
            Map<Object, Object> levelMap = stringRedisTemplate.opsForHash().entries(hashKey);
            for (Map.Entry<Object, Object> entry : levelMap.entrySet()) {
                jdbcTemplate.update(insertSql, snowFlake.nextId(), stat.getSourceCode(),
                        stat.getTargetIndex(), entry.getKey(), entry.getValue());
                logger.info("数据统计使用： 日志分区间数据量：{}------{}", entry.getKey(), entry.getValue());
            }
        }
    }

}
