package cn.dglydrpy.study.ssm.common.kafka;

import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import cn.dglydrpy.study.ssm.common.kafka.domain.Customer;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
/**
 * Kafka consumer example.
 *
 * <p>Steps to consume messages:
 * <ol>
 *   <li>Add the kafka-clients dependency to the project.</li>
 *   <li>Set the basic consumer properties:
 *     2.1) bootstrap.servers: the broker(s) of the message queue
 *     2.2) group.id: the consumer group this consumer belongs to
 *     2.3) key.deserializer: deserializer for the message key
 *     2.4) value.deserializer: deserializer for the message value</li>
 *   <li>Obtain a consumer.</li>
 *   <li>Subscribe the consumer to a topic.</li>
 *   <li>Poll in a loop.</li>
 *   <li>Configure the poll timeout.</li>
 *   <li>Fetch the batch of records.</li>
 *   <li>Iterate the batch to get each individual record.</li>
 *   <li>Process the record.</li>
 * </ol>
 *
 * @author dglydrpy
 */
public class MyConsumer {
    /** Consumer configuration; populated once in the constructor and never mutated afterwards. */
    private final Properties properties = new Properties();

    MyConsumer(){
        // Address of the Kafka broker to bootstrap from.
        this.properties.put("bootstrap.servers","192.168.80.128:9092");
        // Consumer group this consumer belongs to.
        this.properties.put("group.id","mykafka");
        // Keys are plain strings.
        this.properties.put("key.deserializer","org.apache.kafka.common.serialization.StringDeserializer");
        // Custom deserializer that turns the value bytes back into a Customer object.
        this.properties.put("value.deserializer","cn.dglydrpy.study.ssm.common.kafka.deserializer.CustomerDeserializer");
    }

    /**
     * Creates a consumer subscribed to the given topic.
     *
     * @param topic the topic to subscribe to
     * @return a consumer already subscribed to {@code topic}; the caller is responsible for closing it
     */
    Consumer<String, Customer> getConsumer(String topic){
        Consumer<String, Customer> consumer = new KafkaConsumer<>(this.properties);
        consumer.subscribe(Collections.singletonList(topic));
        return consumer;
    }

    public static void main(String[] args) {
        main2();
    }

    /**
     * Polls "myTopic" in an endless loop and prints every Customer record received.
     * Blocks forever; the consumer is closed if the loop exits on an exception.
     */
    public static void main2() {
        MyConsumer myConsumer = new MyConsumer();
        Consumer<String, Customer> consumer = myConsumer.getConsumer("myTopic");
        try {
            while (true) {
                // A single poll waits at most 100 ms for new records.
                ConsumerRecords<String, Customer> records = consumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, Customer> record : records) {
                    String key = record.key();
                    Customer value = record.value();
                    String topic = record.topic();
                    long offset = record.offset();
                    int partition = record.partition();
                    // Fixed: the original output was missing the ':' after "value.name".
                    System.out.println("key:" + key
                            + ",value.id:" + value.getId()
                            + ",value.name:" + value.getCustomerName()
                            + ",topic:" + topic
                            + ",offset:" + offset
                            + ",partition:" + partition);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Release sockets and leave the consumer group cleanly.
            consumer.close();
        }
    }
}
