package demo;

import demo.kafka.ConsumerListenerService;
import demo.kafka.ConsumerListenerService2;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
import org.springframework.kafka.listener.KafkaMessageListenerContainer;
import org.springframework.kafka.listener.config.ContainerProperties;

import javax.annotation.PostConstruct;
import java.util.HashMap;
import java.util.Map;

@SpringBootApplication
public class DemoHbaseApplication {

    /** Kafka broker list shared by the producer and both consumer configs (was duplicated three times). */
    private static final String BOOTSTRAP_SERVERS =
            "192.168.5.129:9092,192.168.5.130:9092,192.168.5.131:9092";

    /**
     * Builds the HBase client configuration bean.
     *
     * @param quorum ZooKeeper quorum host list, injected from {@code hbase.zookeeper.quorum}
     * @param port   ZooKeeper client port, injected from {@code hbase.zookeeper.port}
     * @return an HBase {@link Configuration} pointing at the configured ZooKeeper ensemble
     */
    @Bean("hbaseConf")
    public Configuration hbaseConf(@Value("${hbase.zookeeper.quorum}") String quorum,
                                   @Value("${hbase.zookeeper.port}") String port) {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", quorum);
        // Kept for backward compatibility with any code reading this custom key.
        conf.set("hbase.zookeeper.port", port);
        // Bug fix: the key the HBase client actually honours for the ZooKeeper client port is
        // "hbase.zookeeper.property.clientPort"; "hbase.zookeeper.port" is not a standard key,
        // so without this line the client silently falls back to the default port 2181.
        conf.set("hbase.zookeeper.property.clientPort", port);
        return conf;
    }

    /**
     * Kafka producer template for Integer keys / String values.
     * <p>
     * {@code acks=-1} (all): the leader waits for the full in-sync replica set to
     * acknowledge each record. {@code autoFlush=true} on the template flushes after
     * every send — safe but slow; preserved from the original behaviour.
     *
     * @return a {@link KafkaTemplate} wired to {@link #BOOTSTRAP_SERVERS}
     */
    @Bean
    public KafkaTemplate<Integer, String> kafkaTemplate() {
        Map<String, Object> producerProperties = new HashMap<>();
        producerProperties.put("bootstrap.servers", BOOTSTRAP_SERVERS);
        producerProperties.put("client.id", "springframework");
        producerProperties.put("acks", "-1");
        producerProperties.put("key.serializer", "org.apache.kafka.common.serialization.IntegerSerializer");
        producerProperties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        ProducerFactory<Integer, String> producerFactory =
                new DefaultKafkaProducerFactory<>(producerProperties);
        return new KafkaTemplate<>(producerFactory, true);
    }

    /**
     * Shared Kafka consumer configuration.
     * <p>
     * Extracted because the identical map was duplicated verbatim in
     * {@link #messageListenerContainer()} and {@link #kafkaListenerContainerFactory()}.
     *
     * @return a fresh, mutable map of consumer properties (Integer keys / String values,
     *         auto-commit every second, reading from the earliest offset)
     */
    private static Map<String, Object> consumerConfig() {
        Map<String, Object> consumerProperties = new HashMap<>();
        consumerProperties.put("bootstrap.servers", BOOTSTRAP_SERVERS);
        consumerProperties.put("group.id", "spring_group");
        consumerProperties.put("enable.auto.commit", "true");
        consumerProperties.put("auto.commit.interval.ms", "1000");
        consumerProperties.put("auto.offset.reset", "earliest");
        consumerProperties.put("key.deserializer", "org.apache.kafka.common.serialization.IntegerDeserializer");
        consumerProperties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        return consumerProperties;
    }

    /**
     * Message listener container subscribed to topics {@code test} and {@code test1},
     * delivering records to {@link ConsumerListenerService}.
     * <p>
     * NOT registered as a bean — the {@code @Bean(initMethod = "doStart")} annotation is
     * commented out (preserved below). If re-enabled, the container must be started via
     * {@code doStart()}, as the original author noted.
     *
     * @return an unstarted {@link ConcurrentMessageListenerContainer}
     */
//    @Bean(initMethod = "doStart")
    public ConcurrentMessageListenerContainer<Integer, String> messageListenerContainer() {
        DefaultKafkaConsumerFactory<Integer, String> consumerFactory =
                new DefaultKafkaConsumerFactory<>(consumerConfig());

        // Container configuration: subscribed topics + the message-handling callback.
        ContainerProperties containerProperties = new ContainerProperties("test", "test1");
        containerProperties.setMessageListener(new ConsumerListenerService());

        return new ConcurrentMessageListenerContainer<>(consumerFactory, containerProperties);
    }

    /**
     * Listener container factory backing {@code @KafkaListener}-annotated endpoints.
     *
     * @return a factory producing containers with 3 consumer threads and a 3s poll timeout
     */
    @Bean
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<Integer, String>> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<Integer, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(new DefaultKafkaConsumerFactory<>(consumerConfig()));
        // Three concurrent KafkaMessageListenerContainers per @KafkaListener endpoint.
        factory.setConcurrency(3);
        factory.getContainerProperties().setPollTimeout(3000);
        return factory;
    }

    public static void main(String[] args) {
        SpringApplication.run(DemoHbaseApplication.class, args);
    }

}
