package com.caine.kafka.component;

import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.config.SaslConfigs;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.stereotype.Component;

import java.util.HashMap;
import java.util.Map;

@Component
public class KafkaComponent {

    /**
     * Creates the raw Kafka consumer bean.
     *
     * @return a String/String {@link KafkaConsumer} built from {@link #consumerConfigs()}
     */
    @Bean("preConsumer")
    public KafkaConsumer<String, String> createConsumer() {
        return new KafkaConsumer<>(consumerConfigs());
    }

    /**
     * Creates the Kafka producer bean.
     *
     * <p>NOTE(review): the method name has a typo ("createafkaProducer"); it is kept
     * for backward compatibility since callers may reference it directly. The bean
     * itself is addressed by its explicit name {@code "preProducer"}.
     *
     * @return a String/String {@link KafkaProducer} built from {@link #producerConfigs()}
     */
    @Bean("preProducer")
    public Producer<String, String> createafkaProducer() {
        return new KafkaProducer<>(producerConfigs());
    }

    /**
     * Listener container factory for {@code @KafkaListener}-annotated consumers.
     *
     * @return a concurrent container factory backed by {@link #consumerConfigs()}
     */
    @Bean("kafkaListenerConsumerContainerFactory")
    public KafkaListenerContainerFactory<?> batchFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(new DefaultKafkaConsumerFactory<>(consumerConfigs()));
        // factory.setBatchListener(true); // enable batch listening if needed
        factory.getContainerProperties().setPollTimeout(4000);
        return factory;
    }

    /**
     * Builds the consumer configuration map.
     *
     * @return properties for constructing a String/String Kafka consumer
     */
    public Map<String, Object> consumerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:6666");
        String groupIdConfig = "test_consumer_group";
        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupIdConfig);
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
        /*
         * auto.offset.reset semantics:
         * earliest - resume from the committed offset; if none, consume from the beginning
         * latest   - resume from the committed offset; if none, consume only newly produced records
         * none     - resume from committed offsets; throw if any partition lacks a committed offset
         */
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
        // Auto-commit interval. FIX: this config is declared as Type.INT in Kafka's
        // ConfigDef; the previous Long literal (2000L) made client construction fail
        // with "Expected value to be a 32-bit integer, but it was a java.lang.Long".
        props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 2000);
        // Session timeout. FIX: int for the same reason as above (was 30000L).
        props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 30000);
        String deserializerClass = "org.apache.kafka.common.serialization.StringDeserializer";
        // Key deserializer class.
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, deserializerClass);
        // Value deserializer class.
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, deserializerClass);
        // Heartbeat used for crash/rebalance detection; must stay below
        // session.timeout.ms / 3. FIX: int literal (was a Long expression).
        props.put(ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG, 30000 / 3 - 2);
        // Idle-connection reaping disabled: never close idle connections.
        props.put(ConsumerConfig.CONNECTIONS_MAX_IDLE_MS_CONFIG, -1);

//        props.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, kafkaProperties.getMaxPartitionFetchBytes());
        // Decode record values as GBK instead of the StringDeserializer default (UTF-8).
        props.put("value.deserializer.encoding", "gbk");
        boolean preAuthFlag = false;
        // Whether SASL authentication is enabled.
        if (preAuthFlag) {
            addSaslConfig(props);
        }
        return props;
    }

    /**
     * Builds the producer configuration map.
     *
     * @return properties for constructing a String/String Kafka producer
     */
    public Map<String, Object> producerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:6666");
        // Number of resend attempts after a send error (0 = no retries).
        props.put(ProducerConfig.RETRIES_CONFIG, 0);
        // Only the partition leader must acknowledge the write.
        props.put(ProducerConfig.ACKS_CONFIG, "1");
        props.put(ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, 35000);
        // Records destined for the same partition are grouped into batches;
        // this is the per-batch memory ceiling in bytes.
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
        props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
        // Total memory available to the producer for buffering.
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
        String serializerClass = "org.apache.kafka.common.serialization.StringSerializer";
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, serializerClass);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, serializerClass);
        // Encode record values as GBK instead of the StringSerializer default (UTF-8).
        props.put("value.serializer.encoding", "gbk");
        boolean authFlag = false;
        // Whether SASL authentication is enabled.
        if (authFlag) {
            addSaslConfig(props);
        }
        return props;
    }

    /**
     * Adds SASL/SCRAM-SHA-512 authentication settings to a client config map
     * (previously duplicated in both config methods).
     *
     * <p>SECURITY NOTE(review): the credentials below are hard-coded placeholders;
     * move them to externalized, secret-managed configuration before enabling auth.
     *
     * @param props client configuration map to augment in place
     */
    private void addSaslConfig(Map<String, Object> props) {
        props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
        props.put(SaslConfigs.SASL_MECHANISM, "SCRAM-SHA-512");
        String saslJaasConfigFormat = "%s required username=\"%s\" password=\"%s\";";
        String configClass = "org.apache.kafka.common.security.scram.ScramLoginModule";
        String userName = "XXX";
        String password = "xxx";
        // SaslConfigs.SASL_JAAS_CONFIG == "sasl.jaas.config" — constant over raw string.
        props.put(SaslConfigs.SASL_JAAS_CONFIG,
                String.format(saslJaasConfigFormat, configClass, userName, password));
    }
}
