package com.k2data.k2app.consumer;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.k2data.k2app.common.MyConstant;
import com.k2data.k2app.domain.WarningRecord;
import com.k2data.k2app.service.RedisService;
import com.k2data.k2app.service.WarningRecordService;
import com.k2data.k2app.util.DateFormatterUtil;
import com.k2data.k2app.utils.StringUtils;
import kafka.consumer.Consumer;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;
import kafka.serializer.Decoder;
import kafka.utils.VerifiableProperties;
import lombok.extern.log4j.Log4j2;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Component;

import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

/**
 * Kafka consumer service that attaches to two topics via the legacy
 * (zookeeper-based) high-level consumer API:
 * <ul>
 *   <li>{@code kafka.consumer.topic} — anomaly-detection events; each message
 *       triggers a "fault" warning record (rate-limited to one per day);</li>
 *   <li>{@code kafka.consumer.topicData} — realtime data; each message updates
 *       the latest-ingest state in redis and, if the previous state was
 *       abnormal, inserts a recovery record.</li>
 * </ul>
 * Lifecycle: call {@link #init()} then {@link #consumeTopic()}; stop with
 * {@link #shutdown()}.
 *
 * @author cuilibo@k2data.com.cn
 */
@Log4j2
@Component
public class KafkaService {

    /** Number of consumer threads (streams) per topic. */
    @Value("${kafka.consumer.num}")
    private Integer threadNum;
    /** Anomaly-detection topic name. */
    @Value("${kafka.consumer.topic}")
    private String topic;
    /** Realtime-data topic name. */
    @Value("${kafka.consumer.topicData}")
    private String topicData;
    /** Zookeeper connect string for the legacy consumer. */
    @Value("${kafka.zookeeper.url}")
    private String zookeeperUrl;

    @Autowired
    @Lazy
    private RedisService redisService;

    @Autowired
    private WarningRecordService warningRecordService;

    private ConsumerConnector consumer;
    private ExecutorService executorService;

    /** Cache entry lifetime in seconds (10 minutes). */
    private static final Long EXPIRES_IN_SECONDS = 10 * 60L;

    // NOTE(review): this cache is never read or written anywhere in this class —
    // confirm it is actually needed before removing it.
    private Cache<String, String> cache = CacheBuilder.newBuilder()
            .maximumSize(3000)
            .expireAfterWrite(EXPIRES_IN_SECONDS, TimeUnit.SECONDS)
            .build();

    public KafkaService() {
        // Cached pool: one worker per Kafka stream submitted in consumeTopic().
        this.executorService = Executors.newCachedThreadPool();
    }

    /**
     * Creates the legacy Kafka consumer connector. Must be called before
     * {@link #consumeTopic()}.
     */
    public void init() {
        Properties props = new Properties();
        props.put("zookeeper.connect", this.zookeeperUrl);
        // "largest" is the old-consumer equivalent of "latest".
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "largest");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "consumer-k2asset");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        this.consumer = Consumer.createJavaConsumerConnector(new kafka.consumer.ConsumerConfig(props));
    }

    /**
     * Creates {@code threadNum} streams for each of the two topics and submits
     * one consumer task per stream to the executor.
     */
    public void consumeTopic() {
        Map<String, Integer> topicCountMap = new HashMap<>(16);
        topicCountMap.put(this.topic, this.threadNum);
        topicCountMap.put(this.topicData, this.threadNum);

        Decoder<String> keyDecoder = new kafka.serializer.StringDecoder(new VerifiableProperties());
        Decoder<String> valueDecoder = new kafka.serializer.StringDecoder(new VerifiableProperties());

        Map<String, List<KafkaStream<String, String>>> map =
                this.consumer.createMessageStreams(topicCountMap, keyDecoder, valueDecoder);

        for (KafkaStream<String, String> kafkaStream : map.get(this.topic)) {
            this.executorService.submit(runnable(kafkaStream.iterator()));
        }
        for (KafkaStream<String, String> kafkaStream : map.get(this.topicData)) {
            this.executorService.submit(runnableData(kafkaStream.iterator()));
        }
    }

    /**
     * Consumer loop for the anomaly-detection topic. Every received message
     * means anomalous data was detected; a "fault" warning record is inserted
     * unless a warning was already raised within the last day.
     *
     * @param iterator blocking iterator over the topic's stream
     * @return the long-running consumer task
     */
    private Runnable runnable(ConsumerIterator<String, String> iterator) {
        return () -> {
            try {
                while (iterator.hasNext()) {
                    try {
                        // BUGFIX: the message must actually be consumed; without
                        // next() the iterator never advances, so hasNext() keeps
                        // returning true for the same message and this loop spins
                        // forever, inserting duplicate warnings.
                        iterator.next();
                        log.info("Anomalous data detected, inserting warning record");
                        WarningRecord lastOne = warningRecordService.getLastOne();
                        // Re-raise only when the last warning is more than one day old
                        // (or there is no previous warning at all).
                        if (lastOne == null
                                || LocalDateTime.now().minusDays(1).isAfter(lastOne.getWarningTime())) {
                            insertExceptionWarning();
                        }
                    } catch (Exception e) {
                        // Log and keep consuming: one bad message must not kill the thread.
                        log.error(e.getMessage(), e);
                    }
                }
            } catch (Exception e) {
                // hasNext() itself failed — the stream is broken; log and exit the task.
                log.error(e.getMessage(), e);
            }
        };
    }

    /**
     * Consumer loop for the realtime-data topic. For each message: extracts the
     * sample timestamp, inserts a "recovered" record if the previous redis state
     * was abnormal, then writes the latest-ingest state back to redis.
     *
     * @param iterator blocking iterator over the topic's stream
     * @return the long-running consumer task
     */
    private Runnable runnableData(ConsumerIterator<String, String> iterator) {
        return () -> {
            try {
                while (iterator.hasNext()) {
                    try {
                        MessageAndMetadata<String, String> messageAndMetadata = iterator.next();
                        JSONObject object = JSON.parseObject(messageAndMetadata.message());
                        JSONObject jsonObjectTime = object.getJSONObject("sampleTime");
                        Long timestamp = jsonObjectTime.getLong("timestamp");

                        // Prefer the epoch-millis timestamp; fall back to the ISO string.
                        LocalDateTime ts = (timestamp == null)
                                ? parseTimeToLocalDateTime(jsonObjectTime.getString("iso"))
                                : LocalDateTime.ofInstant(Instant.ofEpochMilli(timestamp), ZoneId.systemDefault());

                        Map map = redisService.get("defaultSpaceRaw");
                        if (map == null) {
                            // No state in redis yet — start a fresh one instead of NPE-ing.
                            map = new HashMap(16);
                        }
                        Object isNormal = map.get("isNormal");
                        // Previous state was "not normal" -> record a recovery entry.
                        // (null-safe: a missing key previously crashed this thread)
                        if (isNormal != null && "no".equals(isNormal.toString())) {
                            WarningRecord warningRecord = new WarningRecord();
                            warningRecord.setServerName("发数服务");
                            warningRecord.setWarningTime(ts);
                            warningRecord.setStatus("normal");
                            warningRecord.setMessage("恢复正常");
                            warningRecordService.add(warningRecord);
                        }

                        log.info("Writing latest-ingest state back to redis");
                        map.put("ts", DateFormatterUtil.parseToString(ts));
                        map.put("isNormal", "yes");
                        map.put("isEmail", "no");
                        redisService.set("defaultSpaceRaw", map);
                    } catch (Exception e) {
                        // Log and keep consuming: one bad message must not kill the thread.
                        log.error(e.getMessage(), e);
                    }
                }
            } catch (Exception e) {
                // hasNext() itself failed — the stream is broken; log and exit the task.
                log.error(e.getMessage(), e);
            }
        };
    }

    /** Stops accepting new consumer tasks; already-submitted tasks finish normally. */
    public void shutdown() {
        this.executorService.shutdown();
    }

    /**
     * Parses an ISO-like timestamp string ("yyyy-MM-ddTHH:mm:ss...") by fixed
     * character offsets, then adds 8 hours.
     *
     * NOTE(review): the +8h shift assumes the input is UTC and the target zone
     * is UTC+8 — confirm against the producer's format before changing.
     *
     * @param iso timestamp string, at least 19 characters long
     * @return the shifted LocalDateTime
     */
    public static LocalDateTime parseTimeToLocalDateTime(String iso) {
        return LocalDateTime.of(Integer.parseInt(iso.substring(0, 4)),
                Integer.parseInt(iso.substring(5, 7)),
                Integer.parseInt(iso.substring(8, 10)),
                Integer.parseInt(iso.substring(11, 13)),
                Integer.parseInt(iso.substring(14, 16)),
                Integer.parseInt(iso.substring(17, 19))).plusHours(8);
    }

    /** Inserts a "fault" warning record stamped with the current time. */
    public void insertExceptionWarning() {
        WarningRecord warningRecord = new WarningRecord();
        warningRecord.setLastAcceptTime(LocalDateTime.now());
        warningRecord.setWarningTime(LocalDateTime.now());
        warningRecord.setDealType("");
        warningRecord.setServerName("异常检测");
        warningRecord.setStatus("fault");
        warningRecord.setMessage("接收到异常数据");
        warningRecordService.add(warningRecord);
    }

}
