package com.sui.bigdata.sml.web.configuration;

import com.alibaba.fastjson.JSON;
import com.feidee.fdcommon.configuration.CustomConfiguration;
import com.sui.bigdata.sml.web.misc.websocket.ExperimentStatusServer;
import com.sui.bigdata.sml.web.util.Constants;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.DependsOn;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

/**
 * @author songhaicheng
 * @date 2019/11/12 20:02
 * @description Kafka consumer configuration: builds the listener container factory
 *              (one consumer group per host, so every instance receives every message)
 *              and forwards experiment-status messages to the websocket server.
 * @reviewer
 */
@Configuration
@DependsOn("disconfConfig")
@EnableKafka
@Slf4j
public class KafkaConfig {

    /**
     * Builds the Kafka listener container factory used by {@link #listen}.
     * <p>
     * The consumer group id is set to the local host name so that each application
     * instance joins its own group and therefore receives every message (broadcast
     * semantics — required because each instance pushes to its own websocket clients).
     * <p>
     * NOTE(review): the factory is typed {@code <Integer, String>} but both
     * deserializers are String and the listener receives {@code <String, String>};
     * the generic key type looks wrong, but changing the bean's return type would
     * alter its signature — confirm and fix in a dedicated change.
     *
     * @return a configured {@link KafkaListenerContainerFactory}
     */
    @Bean
    KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<Integer, String>> kafkaListenerContainerFactory() {
        try {
            // Host name (without the "/ip" suffix that InetAddress.toString() appends)
            // makes a stable, readable per-host group id.
            InetAddress address = InetAddress.getLocalHost();
            CustomConfiguration.setString(Constants.KAFKA_GROUP_ID, address.getHostName());
        } catch (UnknownHostException e) {
            // Without a fallback, GROUP_ID_CONFIG below would be stale or null,
            // which either fails the consumer or silently load-balances instead
            // of broadcasting. A random id preserves the one-group-per-instance design.
            log.error("Unknown host", e);
            CustomConfiguration.setString(Constants.KAFKA_GROUP_ID, "sml-web-" + UUID.randomUUID());
        }

        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CustomConfiguration.getString(Constants.KAFKA_BROKERS));
        props.put(ConsumerConfig.GROUP_ID_CONFIG, CustomConfiguration.getString(Constants.KAFKA_GROUP_ID));
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, CustomConfiguration.getBoolean(Constants.KAFKA_AUTO_COMMIT));
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        ConsumerFactory<Integer, String> consumerFactory = new DefaultKafkaConsumerFactory<>(props);

        ConcurrentKafkaListenerContainerFactory<Integer, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory);
        factory.setConcurrency(CustomConfiguration.getInt(Constants.KAFKA_CONCURRENCY));
        factory.getContainerProperties().setPollTimeout(CustomConfiguration.getLong(Constants.KAFKA_POLL_TIMEOUT));

        log.info("---------- kafka 启动完成 ----------");
        return factory;
    }

    /**
     * Consumes experiment-status messages and pushes them to websocket clients.
     * <p>
     * Malformed (non-JSON) messages are logged and skipped; the offset is committed
     * in either case so a poison message cannot block the partition.
     *
     * @param data     the received record; its value is expected to be a JSON object
     *                 containing an {@code "experiment"} key — TODO confirm schema with producer
     * @param consumer the underlying consumer, used for the manual offset commit
     */
    @KafkaListener(topics = {"${kafka.topic}"})
    public void listen(ConsumerRecord<String, String> data, Consumer<?, ?> consumer) {
        log.debug("Kafka 接收到消息：{}", data.value());
        try {
            Map<String, Object> m = JSON.parseObject(data.value());
            ExperimentStatusServer.sendMessage(data.value(), String.valueOf(m.get("experiment")));
        } catch (Exception e) {
            // Parameterized logging; SLF4J treats the trailing Throwable as the stack trace.
            log.error("接收到错误格式的消息（非 JSON）：{}", data.value(), e);
        } finally {
            // NOTE(review): commitSync() runs even when KAFKA_AUTO_COMMIT is true —
            // harmless but redundant in that case; confirm the intended commit mode.
            consumer.commitSync();
        }
    }

}
