package com.lagou.kafka.demo.consumer;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.time.Duration;
import java.util.Arrays;
import java.util.Collections;
import java.util.Properties;

/**
 * Kafka consumer demo: subscribes to topic {@code tp_demo_01} and prints each record.
 *
 * @author oyz
 * @version 1.0.4
 * @since 2022/6/11 15:22
 */
public class Mykafka {

    /**
     * Entry point: configures a String/String consumer, subscribes to
     * {@code tp_demo_01}, and prints topic, key, partition, and value of every
     * record in an endless poll loop.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        Properties properties = new Properties();
        properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.106.130:9092");
        properties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        properties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "mygrp1");
        // If no committed offset exists for this group, start from the earliest record.
        properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        // FIX: the original set GROUP_ID_CONFIG a second time here, silently
        // overwriting "mygrp1" with the typo "myc;ient". The accompanying comment
        // ("if unset, the system auto-generates one") describes the client id,
        // so CLIENT_ID_CONFIG was clearly intended.
        properties.setProperty(ConsumerConfig.CLIENT_ID_CONFIG, "myclient");
        // Register the consumer interceptor chain.
        properties.setProperty(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG,
                "com.lagou.kafka.demo.interceptor.OneInterceptor");

        // try-with-resources: KafkaConsumer is AutoCloseable; closing it releases
        // sockets and lets the broker rebalance the group promptly.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties)) {
            consumer.subscribe(Collections.singleton("tp_demo_01"));
            while (true) {
                // poll(long) is deprecated; use the Duration overload instead.
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(3));
                records.forEach(record -> System.out.println(
                        record.topic() + "\t" + record.key() + "\t"
                                + record.partition() + "\t" + record.value()));
            }
        }
    }
}
