package com.br.aiops.receiver.es.config;

import com.alibaba.fastjson.JSONObject;
import com.br.aiops.receiver.es.config.db.ConfigService;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * In-memory cache of {@link Config} entries, keyed by config id.
 *
 * <p>Configs are loaded from the database via {@link ConfigService}, published as JSON
 * to a Kafka topic (partitioned by config id), and consumed back into the local map by
 * {@link #startConsume}. A scheduled task republishes every 30 seconds; entries that
 * have not been re-consumed within {@value #EXPIRE_SECONDS} seconds are evicted.
 */
@Component
public class ConfigCache {

    private static final Logger LOGGER = LoggerFactory.getLogger(ConfigCache.class);

    /** Entries not refreshed within this many seconds are evicted on each refresh
     *  (covers configs that were disabled or unassigned and are no longer republished). */
    private static final int EXPIRE_SECONDS = 90;

    @Autowired
    private ConfigService configService;

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    /** Partition count of the config topic; messages are routed by {@code id % partitionCount}. */
    @Value("${spring.kafka.consumer.partition-count}")
    private Integer partitionCount;

    /** Kafka topic carrying serialized config messages. */
    @Value("${spring.kafka.consumer.es-config-topic}")
    private String esConfigTopic;

    /** config id -> cached config; concurrent because the Kafka listener thread and the
     *  scheduler thread both read and write it. */
    private final Map<Long/*config id*/, Config> configs = new ConcurrentHashMap<>();

    /**
     * Kafka listener: deserializes an incoming config message, stamps it with the
     * current time, and stores it in the local cache.
     *
     * @param consumer the consumed record whose value is a JSON-encoded {@link Config}
     */
    @KafkaListener(topics = "${spring.kafka.consumer.es-config-topic}")
    public void startConsume(ConsumerRecord<String, String> consumer) {
        Config config = JSONObject.parseObject(consumer.value(), Config.class);
        config.setCacheTime(new Date());
        configs.put(config.getId(), config);
    }

    /**
     * Removes the config with the given id from the cache.
     *
     * @param id config id to evict
     */
    public void remove(Long id) {
        configs.remove(id);
    }

    /**
     * @param id config id to look up
     * @return whether the cache currently holds a config with the given id
     */
    public boolean containsKey(Long id) {
        return configs.containsKey(id);
    }

    /**
     * @param id config id to look up
     * @return the cached config for the given id, or {@code null} if absent
     */
    public Config get(Long id) {
        return configs.get(id);
    }

    /**
     * Returns a snapshot of all cached configs, triggering {@link #loadAndSend()} first
     * when the cache is empty.
     *
     * <p>NOTE(review): on a cold start the returned list may still be empty — configs
     * only appear once the Kafka round-trip through {@link #startConsume} completes.
     *
     * @return a copy of the current cache values
     */
    public List<Config> getConfigs() {
        if (configs.isEmpty()) {
            loadAndSend();
        }
        return new ArrayList<>(configs.values());
    }

    /**
     * Evicts stale entries, then reloads all configs from the database and publishes
     * each one as JSON to the config topic, partitioned by config id.
     */
    public synchronized void loadAndSend() {
        // Hoist the loop-invariant cutoff; removeIf on a ConcurrentHashMap view is the
        // idiomatic (and weakly consistent) way to evict while others may be writing.
        Date expiredBefore = DateUtils.addSeconds(new Date(), -EXPIRE_SECONDS);
        configs.values().removeIf(config -> config.getCacheTime().before(expiredBefore));

        configService.getConfigs().forEach(config -> {
            // floorMod keeps the partition non-negative even if an id were negative;
            // a plain % would produce a negative partition and make send() throw.
            int partition = (int) Math.floorMod(config.getId(), (long) partitionCount);
            kafkaTemplate.send(esConfigTopic, partition, config.getId().toString(), JSONObject.toJSONString(config));
        });
    }

    /**
     * Periodic refresh: republishes all configs every 30 seconds so downstream caches
     * stay warm and stale entries age out. (fixedRate does not align to wall-clock
     * boundaries — it fires 30s after each scheduled start.)
     */
    @Scheduled(fixedRate = 30 * 1000)
    private void refresh() {
        loadAndSend();
        LOGGER.debug("刷新configs完成，刷新后的configs为：{}。", configs);
    }
}
