package org.rency.crawler.integration.kafka;

import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties.Listener;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

/**
 * 描述: Kafka配置<br>
 * 最近修改时间:2017/11/29 15:46<br>
 *
 * @author CaiyuRen
 * @version V1.0
 * @since 2017/11/29 15:46
 */
@Configuration
public class KafkaConfiguration {

  /**
   * Binds configuration under the {@code kafka.*} prefix to the project-local
   * {@link KafkaProperties} holder.
   *
   * @return a property-bound {@link KafkaProperties} instance
   */
  @Bean
  @ConfigurationProperties(prefix = "kafka")
  public KafkaProperties kafkaProperties() {
    return new KafkaProperties();
  }

  /**
   * Listener container factory backing {@code @KafkaListener} methods.
   *
   * <p>Consumer settings (bootstrap servers, group id, auto-commit, timeouts,
   * offset reset) are taken from the bound {@link KafkaProperties}; keys and
   * values are deserialized as plain strings.
   *
   * @param kafkaProperties externally bound Kafka settings
   * @return a concurrent listener container factory for {@code String/String} records
   */
  @Bean
  public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory(
      KafkaProperties kafkaProperties) {
    Map<String, Object> consumerProps = new HashMap<>();
    consumerProps.put(
        ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
        String.format("%s:%s", kafkaProperties.getServerAddress(), kafkaProperties.getServerPort()));
    consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, kafkaProperties.getGroupName());
    consumerProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, kafkaProperties.isEnableAutoCommit());
    // NOTE(review): the getter name contains a typo ("Cimmit"); it is declared on the
    // project-local KafkaProperties class outside this file, so it is kept as-is here.
    consumerProps.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, kafkaProperties.getAutoCimmitIntervalMs());
    consumerProps.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, kafkaProperties.getSessionTimeoutMs());
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, kafkaProperties.getAutoOffsetReset());
    consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);

    ConsumerFactory<String, String> consumerFactory =
        new DefaultKafkaConsumerFactory<>(consumerProps);
    ConcurrentKafkaListenerContainerFactory<String, String> factory =
        new ConcurrentKafkaListenerContainerFactory<>();
    factory.setConsumerFactory(consumerFactory);
    factory.setAutoStartup(kafkaProperties.isAutoStartup());
    return factory;
  }

  /**
   * Exposes a fresh {@code KafkaProperties.Listener} holder as a bean.
   *
   * <p>NOTE(review): this is Spring Boot's <em>nested</em> listener-properties
   * class ({@code org.springframework.boot.autoconfigure.kafka.KafkaProperties.Listener}),
   * which Boot normally manages via auto-configuration. Verify that something
   * actually injects this bean — it may be dead code, but it is kept to avoid
   * changing the application context.
   *
   * @return a new {@link Listener} property holder with default values
   */
  @Bean
  public Listener listener() {
    return new Listener();
  }

  /**
   * Template for publishing {@code String/String} records.
   *
   * <p>Producer tuning (retries, batch size, linger, buffer memory, compression)
   * comes from the bound {@link KafkaProperties}.
   *
   * @param kafkaProperties externally bound Kafka settings
   * @return a {@link KafkaTemplate} backed by a {@link DefaultKafkaProducerFactory}
   */
  @Bean
  public KafkaTemplate<String, String> kafkaTemplate(KafkaProperties kafkaProperties) {
    Map<String, Object> producerProps = new HashMap<>();
    // Fixed: originally used ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG. Both constants map
    // to the same "bootstrap.servers" key, so behavior is unchanged, but the producer-side
    // constant is the correct, self-documenting choice in a producer configuration.
    producerProps.put(
        ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
        String.format("%s:%s", kafkaProperties.getServerAddress(), kafkaProperties.getServerPort()));
    producerProps.put(ProducerConfig.RETRIES_CONFIG, kafkaProperties.getRetryTimes());
    producerProps.put(ProducerConfig.BATCH_SIZE_CONFIG, kafkaProperties.getBatchSize());
    producerProps.put(ProducerConfig.LINGER_MS_CONFIG, kafkaProperties.getLingerMs());
    producerProps.put(ProducerConfig.BUFFER_MEMORY_CONFIG, kafkaProperties.getBufferSize());
    producerProps.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, kafkaProperties.getCompressionType());
    producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);

    ProducerFactory<String, String> producerFactory = new DefaultKafkaProducerFactory<>(producerProps);
    return new KafkaTemplate<>(producerFactory);
  }

}
