package com.vcg.kafka;

import lombok.extern.java.Log;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.annotation.PartitionOffset;
import org.springframework.kafka.annotation.TopicPartition;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.*;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
import org.springframework.kafka.support.converter.MessagingMessageConverter;
import org.springframework.kafka.support.converter.StringJsonMessageConverter;
import org.springframework.kafka.support.serializer.JsonSerializer;

import java.util.HashMap;
import java.util.Map;
import java.util.Random;

/**
 * Spring Boot entry point that wires up a Kafka producer.
 *
 * <p>Declares a {@link ProducerFactory}, its configuration map, and a
 * {@link KafkaTemplate} bean whose payloads are converted to JSON strings
 * via {@link StringJsonMessageConverter}.
 *
 * Created by wuyu on 2016/9/20.
 */
@EnableKafka
@SpringBootApplication
@Log
public class KafkaApplication {

    /** Boots the Spring application context. */
    public static void main(String[] args) {
        SpringApplication.run(KafkaApplication.class, args);
    }

    /**
     * Producer factory for Integer keys and String values, backed by
     * {@link #producerConfigs()}.
     *
     * @return the producer factory bean
     */
    @Bean
    public ProducerFactory<Integer, String> producerFactory() {
        return new DefaultKafkaProducerFactory<>(producerConfigs());
    }

    /**
     * Kafka producer configuration.
     *
     * <p>Fix: {@code group.id} and {@code enable.auto.commit} were removed —
     * they are consumer-only settings, and the producer rejects them with
     * "unknown config" warnings at startup. All keys now use the
     * {@link ProducerConfig} constants instead of raw strings, consistent with
     * the serializer entries.
     *
     * @return producer properties keyed by Kafka configuration names
     */
    @Bean
    public Map<String, Object> producerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return props;
    }

    /**
     * Template for sending messages; values are serialized as JSON strings.
     *
     * <p>Fix: the return type is now the parameterized
     * {@code KafkaTemplate<Integer, String>} instead of the raw type, removing
     * the unchecked warning; injection sites are unaffected.
     *
     * @return the configured template bean
     */
    @Bean
    public KafkaTemplate<Integer, String> kafkaTemplate() {
        KafkaTemplate<Integer, String> kafkaTemplate = new KafkaTemplate<>(producerFactory());
        kafkaTemplate.setMessageConverter(new StringJsonMessageConverter());
        return kafkaTemplate;
    }
}
