package com.tgy.kafka.config;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;

import javax.annotation.Resource;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;

/***
 * @ClassName: ConsumersConfig
 * @Description: Kafka consumer configuration and batch listener for multi-metric forecast results.
 * @Author: tanggy3
 * @Date: 16:45 2022/6/25
 * @version : V1.0
 */
@Configuration
@EnableKafka
@Slf4j
public class ConsumersConfig {

    private final KafkaConfig kafkaConfig;

    @Value("${tgy.kafka.change-consumer.bootstrapServers}")
    private String bootstrapServers;

    @Value("${tgy.kafka.change-consumer.groupId}")
    private String groupId;

    @Value("${tgy.kafka.change-consumer.autoStartup}")
    private boolean autoStartup;

    @Value("${tgy.kafka.change-consumer.concurrency}")
    private int concurrency;

    @Autowired
    public ConsumersConfig(KafkaConfig kafkaConfig) {
        this.kafkaConfig = kafkaConfig;
    }

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        Map<String, Object> consumerConfig = kafkaConfig.consumerConfig();
        consumerConfig.put(org.apache.kafka.clients.consumer.ConsumerConfig.GROUP_ID_CONFIG, groupId);
        return new DefaultKafkaConsumerFactory<>(consumerConfig);
    }


    @Bean
    public KafkaListenerContainerFactory<?> testChangeBatchFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
        Map<String, Object> consumerConfig = kafkaConfig.consumerConfig();
        consumerConfig.put(org.apache.kafka.clients.consumer.ConsumerConfig.GROUP_ID_CONFIG, groupId);
        if (!StringUtils.isEmpty(bootstrapServers)) {
            consumerConfig.put(org.apache.kafka.clients.consumer.ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        }

        log.info("AiTaskStatusConsumer配置信息：" + JSON.toJSONString(consumerConfig));

        //factory.setConsumerFactory(new DefaultKafkaConsumerFactory<>(consumerConfig));
        factory.setConsumerFactory(consumerFactory());
        factory.setBatchListener(true);
        factory.setConcurrency(concurrency);
        factory.setAutoStartup(autoStartup);
        return factory;
    }


    /**
     * 监听
     */
    @KafkaListener(topics = {"${tgy.kafka.aiops.consumer1.topic}"},  containerFactory = "multiForecastBatchFactory")
    public void consumerListener(List<ConsumerRecord<?, ?>> recordLists) {
        if (log.isDebugEnabled()) {
            log.debug("从kafka接收多指标预测结果返回数据数据数量：" + recordLists.size());
        }
        try {
            log.info("消费数据 =============== {}", recordLists);
            consumer(recordLists);
        } catch (Exception e) {
            log.info("消费出错");
            e.printStackTrace();
        }
    }

    /**
     * 处理
     */
    private void consumer(List<ConsumerRecord<?, ?>> recordLists) {
        recordLists.forEach(msgObjcet -> {
            String key = (String) msgObjcet.key();
            Optional<?> kafkaMessage = Optional.ofNullable(msgObjcet.value());
            if (kafkaMessage.isPresent()) {
                Object message = kafkaMessage.get();
                try {
                    String msg = String.valueOf(message);
                    handle(key, msg);
                } catch (Exception e) {
                    log.error("consumer error:", e);
                }
            }
        });
    }


    /**
     * 调用业务层
     * @param kafkaKey
     * @param msg
     */
    public void handle(String kafkaKey, String msg) {
        try {
            JSONObject resJson = JSON.parseObject(msg);
            if (log.isDebugEnabled()) {
                log.debug("开始处理多指标预测返回结果:" + resJson);
            }
            /**
             * 插入所有记录表
             */
            if (resJson != null && Objects.equals(resJson.get("code"), "0")) {
                /**
                 * 先不区分数据放入所有记录表中
                 */
            }
        } catch (Exception e) {
            log.error("The MultiForecastConsumer realTimeResult kafka message deals fail,kafkaKey:" + kafkaKey + ",msg:" + msg, e);
        }
    }

}
