// NOTE(review): this entire Kafka consumer configuration class is commented out (disabled).
// If it is ever re-enabled, restore the package declaration below and supply credentials via
// externalized configuration — never hard-code them in source.
//package com.test;
//
//
//import org.apache.kafka.clients.CommonClientConfigs;
//import org.apache.kafka.clients.consumer.ConsumerConfig;
//import org.apache.kafka.common.config.SaslConfigs;
//import org.apache.kafka.common.security.auth.SecurityProtocol;
//import org.apache.kafka.common.serialization.StringDeserializer;
//import org.springframework.context.annotation.Bean;
//import org.springframework.context.annotation.Configuration;
//import org.springframework.kafka.annotation.EnableKafka;
//import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
//import org.springframework.kafka.core.ConsumerFactory;
//import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
//import org.springframework.kafka.listener.SeekToCurrentErrorHandler;
//
//import javax.annotation.Resource;
//import java.util.HashMap;
//import java.util.Map;
///**
// * Copyright
// *
// * @Author WuHuang
// * @Date 2023/4/25
// */
//
//@Configuration
//@EnableKafka
//public class KafkaConfiguration {
//
//    @Resource
//   private KafkaProperties kafkaProperties;
//
//    // Confluent Cloud API credentials — inject via environment variables or externalized
//    // configuration; never commit real key/secret values to source control
//    private final String saslJaasConfig = "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"<API_KEY>\" password=\"<API_SECRET>\";";
//    private final SecurityProtocol securityProtocol = SecurityProtocol.SASL_SSL;
//    private final String saslMechanism = "PLAIN";
//    private final String bootstrap_server = "127.0.0.1:9092";
//    private final String bootstrap_server_group = "test-consumer";
//
//
//    @Bean
//    public ConsumerFactory<String, String> consumerFactory() {
//        // NOTE(review): 'properties' is fetched but never used below — fold it into 'configs'
//        // or remove it if this class is re-enabled
//        Map<String, String> properties = kafkaProperties.getProperties();
//        Map<String, Object> configs = new HashMap<>();
//        // Kafka broker (bootstrap server) address
//        configs.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrap_server);
//        // Whether to enable automatic committing of consumer offsets
//        configs.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG,"true");
//        // Interval (ms) between automatic offset commits
//        configs.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG,"1000");
//        // Whether to use a specific-record Avro reader when deserializing Avro data
////        configs.put(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG,true);
//
////        configs.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081");
//        // Authentication / security settings
//        configs.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, securityProtocol.name());
//        configs.put(SaslConfigs.SASL_MECHANISM, saslMechanism);
////        configs.put(SaslConfigs.SASL_JAAS_CONFIG, saslJaasConfig);
//
//        // Unique identifier of the consumer group this consumer belongs to
//        configs.put(ConsumerConfig.GROUP_ID_CONFIG, bootstrap_server_group);
//        // When no committed offset exists for the group, start reading from the earliest offset
//        configs.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
//        return new DefaultKafkaConsumerFactory<>(configs, new StringDeserializer(), new StringDeserializer());
//    }
//
//
//    @Bean
//    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
//        ConcurrentKafkaListenerContainerFactory<String, String> factory =
//                new ConcurrentKafkaListenerContainerFactory<>();
//        factory.setConsumerFactory(consumerFactory());
//        // NOTE(review): SeekToCurrentErrorHandler is deprecated since spring-kafka 2.8 in favor
//        // of DefaultErrorHandler (factory.setCommonErrorHandler). Also, enable.auto.commit=true
//        // set above can conflict with container-managed error handling/retries — verify before
//        // re-enabling this class.
//        factory.setErrorHandler(new SeekToCurrentErrorHandler());
//        return factory;
//    }
//
//
//
//}
