package com.zork.data.generator.service.impl;

import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.util.RandomUtil;
import cn.hutool.core.util.StrUtil;
import com.alibaba.fastjson.JSONObject;
import com.zork.data.generator.cache.Cache;
import com.zork.data.generator.config.MockDataConfig;
import com.zork.data.generator.model.CheckItem;
import com.zork.data.generator.model.CmdbTopo;
import com.zork.data.generator.service.MockData;
import com.zork.data.generator.utils.AvroSerializerFactory;
import com.zork.data.generator.utils.DateUtil;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.common.protocol.types.Field;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import java.text.DecimalFormat;
import java.util.*;

/**
 * @author xiesen
 */
@Service
@Slf4j
public class MockDataImpl implements MockData {

    /** Avro-serialized payloads (logs / metrics) go through this template. */
    @Resource
    KafkaTemplate<String, Object> kafkaTemplate;

    /** Plain-string payloads (InfluxDB line protocol, alarm JSON) go through this template. */
    @Resource
    KafkaTemplate<String, Object> kafkaStringTemplate;

    @Resource
    MockDataConfig mockDataConfig;

    /**
     * Generates one mock log record per cached CMDB topology entry and sends it
     * (Avro-serialized) to the configured log topic. No-op with a warning when
     * mocking is disabled or the CMDB cache is empty.
     */
    @Override
    public void log() {
        if (mockDataConfig.getEnable()) {
            final List<CmdbTopo> assetsCmdbCache = Cache.getInstance().getAssetsCmdbCache();
            if (CollUtil.isNotEmpty(assetsCmdbCache)) {
                for (CmdbTopo cmdbTopo : assetsCmdbCache) {
                    final Map<String, String> dimensions = getRandomDimensions(cmdbTopo);
                    final Map<String, String> normalFields = getRandomNormalFields();
                    final Map<String, Double> measures = getRandomMeasures();
                    String logTypeName = randomLogSetName();
                    String timestamp = DateUtil.getUTCTimeStr();
                    String source = randomSource();
                    String offset = randomOffset();

                    byte[] bytes = AvroSerializerFactory.getLogAvroSerializer().serializingLog(logTypeName, timestamp, source,
                            offset, dimensions, measures, normalFields);
                    kafkaTemplate.send(mockDataConfig.getLogTopic(), bytes);
                }
            } else {
                log.warn("缓存的 cmdb topo 信息为空.");
            }
        } else {
            log.warn("未开启模拟日志数据");
        }
    }

    /**
     * Generates mock metric records: for every cached CMDB entry and every metric
     * set, sends both an InfluxDB line-protocol string and an Avro-serialized
     * record to their respective topics.
     */
    @Override
    public void metric() {
        if (mockDataConfig.getEnable()) {
            final List<CmdbTopo> assetsCmdbCache = Cache.getInstance().getAssetsCmdbCache();
            if (CollUtil.isNotEmpty(assetsCmdbCache)) {
                for (CmdbTopo cmdbTopo : assetsCmdbCache) {
                    for (String metricSetName : METRIC_SET_LIST) {
                        final Map<String, String> dimensions = getRandomDimensions(cmdbTopo, metricSetName);
                        String timestamp = DateUtil.getCurrentTimestamp();
                        Map<String, Double> metrics = getRandomMetrics(metricSetName);
                        String s = metricData2Influxdb(metricSetName, timestamp, dimensions, metrics);
                        kafkaStringTemplate.send(mockDataConfig.getInfluxTopic(), s);

                        byte[] bytes = AvroSerializerFactory.getMetricAvroSerializer().serializingMetric(metricSetName, timestamp
                                , dimensions, metrics);
                        kafkaTemplate.send(mockDataConfig.getMetricTopic(), bytes);
                    }
                }
            } else {
                log.warn("缓存的 cmdb topo 信息为空.");
            }
        } else {
            log.warn("未开启模拟指标数据");
        }
    }

    /**
     * Generates mock metrics for the "no data" monitoring check items cached by
     * {@link Cache}. Silent no-op when the feature is disabled or the cache is empty.
     */
    @Override
    public void noDataMetric() {
        if (mockDataConfig.getEnableNoData()) {
            final List<CheckItem> checkItemList = Cache.getInstance().getNoDataMonitorCache();
            if (CollUtil.isNotEmpty(checkItemList)) {
                for (CheckItem checkItem : checkItemList) {
                    String ip = checkItem.getIp();
                    String metricSetName = checkItem.getMetricSetName();
                    Map<String, String> dimensions = getRandomDimensions(ip);
                    Map<String, Double> metrics = getRandomMetrics(metricSetName);
                    String timestamp = DateUtil.getCurrentTimestamp();
                    byte[] bytes = AvroSerializerFactory.getMetricAvroSerializer().serializingMetric(metricSetName, timestamp
                            , dimensions, metrics);
                    kafkaTemplate.send(mockDataConfig.getNoDataTopic(), bytes);
                }
            }
        }
    }

    /**
     * Formats a metric record as an InfluxDB line-protocol string:
     * {@code measurement,tag=v,... field=v,... timestamp}.
     *
     * <p>FIX: the previous version called {@code substring(0, length - 1)} on the
     * joined fields, which threw {@link StringIndexOutOfBoundsException} when the
     * metrics map was empty; the join is now guarded.
     *
     * @param metricSetName measurement name
     * @param timestamp     timestamp suffix (as produced by {@link DateUtil#getCurrentTimestamp()})
     * @param dimensions    tag key/value pairs
     * @param metrics       field key/value pairs
     * @return one line-protocol record
     */
    private String metricData2Influxdb(String metricSetName, String timestamp, Map<String, String> dimensions, Map<String, Double> metrics) {
        StringBuilder builder = new StringBuilder(metricSetName);
        dimensions.forEach((k, v) -> builder.append(",").append(k).append("=").append(v));

        StringBuilder fieldsBuilder = new StringBuilder();
        metrics.forEach((k, v) -> {
            // Comma-separate without a trailing delimiter; safe for an empty map.
            if (fieldsBuilder.length() > 0) {
                fieldsBuilder.append(",");
            }
            fieldsBuilder.append(k).append("=").append(v);
        });
        return builder + " " + fieldsBuilder + " " + timestamp;
    }

    /**
     * Generates a single mock alarm (randomly metric- or log-based) for a random
     * CMDB entry and sends it as JSON to the alarm topic.
     *
     * <p>FIX: element selection previously used
     * {@code randomInt(0, size() - 1)} — hutool's upper bound is exclusive, so the
     * last cache element could never be picked and a single-element cache produced
     * a degenerate bound. {@link RandomUtil#randomEle(List)} covers the full range.
     */
    @Override
    public void alarm() {
        if (mockDataConfig.getEnable()) {
            final List<CmdbTopo> assetsCmdbCache = Cache.getInstance().getAssetsCmdbCache();
            if (CollUtil.isNotEmpty(assetsCmdbCache)) {
                final CmdbTopo cmdbTopo = RandomUtil.randomEle(assetsCmdbCache);
                JSONObject alarmJson = new JSONObject();

                alarmJson.put("expressionId", 1);

                alarmJson.put("severity", alarmRandomLevel());
                alarmJson.put("status", alarmRandomStatus());
                alarmJson.put("timestamp", DateUtil.getUTCTimeStr());

                // Fixed sample query carried along in the alarm's extension fields.
                String searchSentence = "SELECT mean(\"cores\") AS value  FROM cpu_system_metricbeat WHERE ( \"hostname\" =~ " +
                        "/\\.*/ ) AND ( \"ip\" =~ /\\.*/ ) AND ( \"appsystem\" = 'dev_test') AND time >= 1594209600000ms AND " +
                        "time < 1594209720000ms GROUP BY time(1m),\"hostname\",\"ip\",\"appsystem\" fill(null)";
                JSONObject extFieldsJson = new JSONObject();
                extFieldsJson.put("uuid", RandomUtil.randomString(32));
                extFieldsJson.put("sourSystem", "1");
                extFieldsJson.put("actionID", "0");
                extFieldsJson.put("mergeTag", "1");
                extFieldsJson.put("connectId", RandomUtil.randomString(32));
                extFieldsJson.put("eventNum", "2");
                extFieldsJson.put("alarmSuppress", "alarmSuppress");
                extFieldsJson.put("alarmWay", "2,2,2");
                extFieldsJson.put("successFlag", "1");
                extFieldsJson.put("expressionId", "2");
                extFieldsJson.put("alarmtime", DateUtil.getUTCTimeStr());
                extFieldsJson.put("calenderId", "1");
                extFieldsJson.put("reciTime", String.valueOf(System.currentTimeMillis()));
                extFieldsJson.put("alarmDetailType", "1");
                extFieldsJson.put("revUsers", revUsers());
                extFieldsJson.put("searchSentence", searchSentence);
                alarmJson.put("extFields", extFieldsJson);
                Map<String, String> sourceMap = getRandomDimensions(cmdbTopo);
                alarmJson.put("sources", sourceMap);
                alarmJson.put("alarmTypeName", "alarm_metric");
                final String randomAlarmTypeName = getRandomAlarmTypeName();
                if ("alarm_metric".equalsIgnoreCase(randomAlarmTypeName)) {
                    final String metricSetName = randomMetricSetName();
                    alarmJson.put("metricSetName", metricSetName);
                    alarmMetricTemplate(alarmJson, cmdbTopo.getIP(), metricSetName);
                } else {
                    final String logSetName = randomLogSetName();
                    alarmJson.put("logTypeName", logSetName);
                    alarmLogTemplate(alarmJson, cmdbTopo.getIP(), logSetName);
                }

                kafkaStringTemplate.send(mockDataConfig.getAlarmTopic(), alarmJson.toJSONString());
            } else {
                log.warn("缓存的 cmdb topo 信息为空.");
            }
        } else {
            log.warn("未开启模拟告警数据");
        }
    }

    /**
     * Builds the fixed list of alarm recipients as a JSON array string.
     *
     * @return JSON-encoded list of recipient objects
     */
    private static String revUsers() {
        List<JSONObject> list = new ArrayList<>();
        JSONObject jsonObject1 = new JSONObject();
        jsonObject1.put("email", "yangbaifan@gtjas.com");
        jsonObject1.put("name", "yangbaifan");
        jsonObject1.put("phone", "13701687645");
        jsonObject1.put("realName", "杨白帆");
        jsonObject1.put("telphone", "13701687645");
        jsonObject1.put("userId", "369");
        jsonObject1.put("wechatid", "YangBaiFan");

        JSONObject jsonObject2 = new JSONObject();
        jsonObject2.put("email", "zhangyongjun@gtjas.com");
        jsonObject2.put("name", "zhangyongjun");
        jsonObject2.put("phone", "18817309118");
        jsonObject2.put("realName", "张永军");
        jsonObject2.put("telphone", "18817309118");
        jsonObject2.put("userId", "377");
        jsonObject2.put("wechatid", "ZhangYongJun");

        JSONObject jsonObject3 = new JSONObject();
        jsonObject3.put("email", "zhangguoxing@gtjas.com");
        jsonObject3.put("name", "zhangguoxing");
        jsonObject3.put("phone", "13751744740");
        jsonObject3.put("realName", "张国兴");
        jsonObject3.put("telphone", "13751744740");
        jsonObject3.put("userId", "401");
        jsonObject3.put("wechatid", "ZhangGuoXing");

        list.add(jsonObject1);
        list.add(jsonObject2);
        list.add(jsonObject3);
        return JSONObject.toJSONString(list);
    }

    private static final List<Integer> ALARM_LEVEL = Arrays.asList(1, 3, 4, 5);
    private static final List<Double> ALARM_LEVEL_WEIGHTS = Arrays.asList(0.2, 0.3, 0.4, 0.1);
    private static final List<String> ALARM_STATUS = Arrays.asList("PROBLEM", "NOBROKER");
    private static final List<Double> ALARM_STATUS_WEIGHTS = Arrays.asList(0.7, 0.3);
    // Expanded weighted pools (each entry repeated weight*100 times); populated once
    // in the static block below and never mutated afterwards, hence final.
    private static final List<Integer> ALARM_LEVEL_WEIGHTED_LIST = new ArrayList<>(ALARM_LEVEL.size());
    private static final List<String> ALARM_STATUS_WEIGHTED_LIST = new ArrayList<>(ALARM_STATUS.size());

    static {
        for (int i = 0; i < ALARM_LEVEL.size(); i++) {
            int weight = (int) (ALARM_LEVEL_WEIGHTS.get(i) * 100);
            for (int j = 0; j < weight; j++) {
                ALARM_LEVEL_WEIGHTED_LIST.add(ALARM_LEVEL.get(i));
            }
        }

        for (int i = 0; i < ALARM_STATUS.size(); i++) {
            int weight = (int) (ALARM_STATUS_WEIGHTS.get(i) * 100);
            for (int j = 0; j < weight; j++) {
                ALARM_STATUS_WEIGHTED_LIST.add(ALARM_STATUS.get(i));
            }
        }

    }

    /**
     * 1:严重,3:重要,4:警告,5:信息
     *
     * @return weighted-random alarm level
     */
    private static int alarmRandomLevel() {
        // RandomUtil is backed by ThreadLocalRandom; avoids a new Random() per call.
        return RandomUtil.randomEle(ALARM_LEVEL_WEIGHTED_LIST);
    }

    /** @return weighted-random alarm status ("PROBLEM" 70%, "NOBROKER" 30%). */
    private static String alarmRandomStatus() {
        return RandomUtil.randomEle(ALARM_STATUS_WEIGHTED_LIST);
    }

    /** Fills title/content for a keyword (log) alarm. */
    private static void alarmLogTemplate(JSONObject alarmJson, String ip, String logTypeName) {
        String title = ip + " 关键字告警";
        alarmJson.put("title", title);
        alarmJson.put("content", "IP 为[" + ip + "]的日志关键词有告警,日志集名称为 " + logTypeName + "," + cn.hutool.core.date.DateUtil.format(cn.hutool.core.date.DateUtil.date(), "yyyy-MM-dd HH:mm:ss") + " 提示节点数据源断开连接内部功能号 icube" + RandomUtil.randomInt(10));
    }

    /** Fills title/content for a metric-threshold alarm. */
    private static void alarmMetricTemplate(JSONObject alarmJson, String ip, String metricSetName) {
        String title = ip + " 指标告警";
        alarmJson.put("title", title);
        alarmJson.put("content", "IP 为[" + ip + "]的指标有告警,指标集名称为 " + metricSetName + "," + cn.hutool.core.date.DateUtil.format(cn.hutool.core.date.DateUtil.date(), "yyyy-MM-dd HH:mm:ss") + " 指标阈值超过了" + RandomUtil.randomInt(10));
    }

    /** @return "alarm_metric" or "alarm_log" with equal probability */
    private static String getRandomAlarmTypeName() {
        return RandomUtil.randomBoolean() ? "alarm_metric" : "alarm_log";
    }

    /** Builds the standard five log dimensions from a CMDB topology entry. */
    private static Map<String, String> getRandomDimensions(CmdbTopo cmdbTopo) {
        Map<String, String> dimensions = new HashMap<>(8);
        dimensions.put("appsystem", cmdbTopo.getYWJC());
        dimensions.put("hostname", definingHostName(cmdbTopo.getIP()));
        dimensions.put("ip", cmdbTopo.getIP());
        dimensions.put("clustername", cmdbTopo.getMKMC());
        dimensions.put("servicename", cmdbTopo.getZUJIANMC());
        return dimensions;
    }

    /**
     * Builds fixed dimensions for the no-data check items; an unresolvable host
     * name also zeroes out the IP so downstream consumers see a consistent pair.
     */
    private static Map<String, String> getRandomDimensions(String ip) {
        Map<String, String> dimensions = new HashMap<>(8);
        dimensions.put("appsystem", "jzjy");
        String hostName = definingHostName(ip);
        if ("default.host.com".equalsIgnoreCase(hostName)) {
            ip = "0.0.0.0";
        }
        dimensions.put("ip", ip);
        dimensions.put("hostname", hostName);
        dimensions.put("clustername", "应用服务器");
        dimensions.put("servicename", "卡园_核心业务区");
        return dimensions;
    }

    /**
     * Builds metric-set-specific dimensions: host metrics (cpu/memory/filesystem)
     * carry a worker-node id (and a disk name for filesystem); everything else
     * gets the generic cluster/service dimensions.
     */
    private static Map<String, String> getRandomDimensions(CmdbTopo cmdbTopo, String metricSetName) {
        Map<String, String> dimensions = new HashMap<>(8);
        switch (metricSetName) {
            case "cpu":
            case "memory":
                dimensions.put("appsystem", cmdbTopo.getYWJC());
                dimensions.put("hostname", definingHostName(cmdbTopo.getIP()));
                dimensions.put("ip", cmdbTopo.getIP());
                dimensions.put("workernodeid", "0");
                break;
            case "filesystem":
                dimensions.put("appsystem", cmdbTopo.getYWJC());
                dimensions.put("hostname", definingHostName(cmdbTopo.getIP()));
                dimensions.put("ip", cmdbTopo.getIP());
                dimensions.put("disk_name", "/data");
                dimensions.put("workernodeid", "0");
                break;
            default:
                dimensions.put("appsystem", cmdbTopo.getYWJC());
                dimensions.put("hostname", definingHostName(cmdbTopo.getIP()));
                dimensions.put("ip", cmdbTopo.getIP());
                dimensions.put("clustername", cmdbTopo.getMKMC());
                dimensions.put("servicename", cmdbTopo.getZUJIANMC());
                break;
        }

        return dimensions;
    }

    /**
     * Derives a synthetic host name "gtja{seg3}-{seg4}.host.com" from the last two
     * IP segments, or "default.host.com" when the IP is blank or malformed.
     *
     * <p>FIX: the previous version appended the "gtja" prefix before validating the
     * segment count, so a non-blank malformed IP returned the bare string "gtja"
     * instead of the fallback that callers (see the no-data dimension builder)
     * explicitly test for.
     *
     * @param ip dotted IPv4 address, may be blank
     * @return derived host name, never null
     */
    private static String definingHostName(String ip) {
        if (StrUtil.isNotBlank(ip)) {
            final String[] segments = ip.split("\\.");
            if (segments.length >= 4) {
                return "gtja" + segments[2] + "-" + segments[3] + ".host.com";
            }
            log.info("Invalid IP address format. {}", ip);
        }
        return "default.host.com";
    }

    /** @return a single fixed nginx-style access-log line under the "message" key */
    private static Map<String, String> getRandomNormalFields() {
        Map<String, String> normalFields = new HashMap<>(1);
        normalFields.put("message", "183.95.248.189 - - [23/Jul/2020:08:26:32 +0800] \"GET " +
                "/gsnews/gsf10/capital/main/1.0?code=601618&market=SH&gs_proxy_params=eyJnc19yZXFfdHlwZSI6ImRhdGEifQ" +
                "%3D%3D HTTP/1.1\" 200 872 ");
        return normalFields;
    }

    /** @return one random measure ("functionnum" in [0.1, 1.0)) */
    private static Map<String, Double> getRandomMeasures() {
        Map<String, Double> measures = new HashMap<>(2);
        measures.put("functionnum", RandomUtil.randomDouble(0.1, 1.0));
        return measures;
    }

    /** @return a random log offset rendered as a decimal string */
    private static String randomOffset() {
        return String.valueOf(RandomUtil.randomLong());
    }

    private static final List<String> METRIC_SET_LIST = Arrays.asList("filesystem", "cpu", "memory", "disk_IO", "icmp", "network", "process_number_count", "process_tcp_count", "load_system_mb", "netstat");
    private static final List<String> LOG_SET_LIST = Arrays.asList("audit_system", "audit_auditd",
            "default_analysis_template", "windows_winlog", "tkernel", "default_analysis_template_keyword", "dys_t_order", "dys_t_order_status", "com_raw_log", "kcbp_biz_log", "usercenter", "yhzx_jstorm_kafka_log", "dys_AGW_log", "dys_mysql_log");

    private static final List<String> LOG_SOURCE_LIST = Arrays.asList("/var/log/access.log", "/var/log/nginx.log",
            "/var/log/hadoop.log", "/var/log/flink.log", "/var/log/elasticsearch.log");


    /** @return a uniformly random metric set name */
    public String randomMetricSetName() {
        return RandomUtil.randomEle(METRIC_SET_LIST);
    }

    /** @return a uniformly random log set name */
    public String randomLogSetName() {
        return RandomUtil.randomEle(LOG_SET_LIST);
    }

    /** @return a uniformly random log source path */
    public String randomSource() {
        return RandomUtil.randomEle(LOG_SOURCE_LIST);
    }

    /** Default metric payload (delay/cost/collect_time); kept for API parity. */
    private static Map<String, Double> getRandomMetrics() {
        Map<String, Double> metrics = new HashMap<>();
        metrics.put("delay", RandomUtil.randomDouble(0.6, 1.0));
        metrics.put("cost", RandomUtil.randomDouble(2.0, 3.0));
        metrics.put("collect_time", (double) System.currentTimeMillis());
        return metrics;
    }

    /**
     * Rounds a value to two decimal places via DecimalFormat ("#.00").
     * A fresh DecimalFormat per call is deliberate: the class is not thread-safe.
     */
    private static double keepTwoDecimalPlaces(double randomValue) {
        DecimalFormat decimalFormat = new DecimalFormat("#.00");
        return Double.parseDouble(decimalFormat.format(randomValue));
    }

    /** Builds randomized metric values appropriate to the given metric set. */
    private static Map<String, Double> getRandomMetrics(String metricSetName) {
        Map<String, Double> metrics = new HashMap<>();
        switch (metricSetName) {
            case "cpu":
                metrics.put("cpu_used_pct", keepTwoDecimalPlaces(RandomUtil.randomDouble(0.6, 1.0)));
                metrics.put("_delay", (double) RandomUtil.randomInt(1000, 2000));
                break;
            case "filesystem":
                metrics.put("disk_free", keepTwoDecimalPlaces(RandomUtil.randomDouble(100000, 200000)));
                metrics.put("disk_used_pct", keepTwoDecimalPlaces(RandomUtil.randomDouble(0.6, 1.0)));
                metrics.put("disk_total", keepTwoDecimalPlaces(RandomUtil.randomDouble(100000, 200000)));
                metrics.put("disk_used", keepTwoDecimalPlaces(RandomUtil.randomDouble(0.6, 1.0)));
                metrics.put("_delay", (double) RandomUtil.randomInt(1000, 2000));
                break;
            case "memory":
                metrics.put("memory_used_pct", keepTwoDecimalPlaces(RandomUtil.randomDouble(0.6, 1.0)));
                metrics.put("memory_actual_used_pct", keepTwoDecimalPlaces(RandomUtil.randomDouble(0.6, 1.0)));
                metrics.put("_delay", (double) RandomUtil.randomInt(1000, 2000));
                break;
            default:
                metrics.put("delay", keepTwoDecimalPlaces(RandomUtil.randomDouble(0.6, 1.0)));
                metrics.put("cost", keepTwoDecimalPlaces(RandomUtil.randomDouble(2.0, 3.0)));
                metrics.put("collect_time", (double) System.currentTimeMillis());
                metrics.put("_delay", (double) RandomUtil.randomInt(1000, 2000));
                break;
        }
        return metrics;
    }
}
