package hamster.csustef.acquisition.util;

import hamster.csustef.acquisition.constant.AcquisitionConstant;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

/**
 * Kafka utility class.
 *
 * @author yin hamaster
 * @date 2021/11/2
 * @description Kafka consumer helper used for testing.
 */
public class KafkaUtil {

    /** Utility class — not meant to be instantiated. */
    private KafkaUtil() {
    }

    /**
     * Creates a {@link KafkaConsumer} already subscribed to the given topic.
     *
     * @param servers comma-separated bootstrap server list ({@code host:port,...})
     * @param groupId consumer group id
     * @param topic   topic to subscribe to
     * @return a String/String consumer subscribed to {@code topic}; the caller owns it
     *         and must {@code close()} it
     */
    public static KafkaConsumer<String, String> consumer(String servers, String groupId, String topic) {
        Properties props = new Properties();
        props.put("bootstrap.servers", servers);
        props.put("group.id", groupId);
        // Consumers only deserialize. The original also set key/value *serializer*
        // properties, which a consumer never uses — they only trigger
        // "The configuration '...' was supplied but isn't used" warnings, so they
        // are omitted here.
        props.put("key.deserializer", StringDeserializer.class.getName());
        props.put("value.deserializer", StringDeserializer.class.getName());

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singletonList(topic));
        return consumer;
    }

    /**
     * Smoke test: polls the weather topic forever and prints every record to stdout.
     * Stop with Ctrl-C; the finally block still runs on normal exceptions.
     */
    public static void main(String[] args) {
        KafkaConsumer<String, String> consumer =
                consumer(AcquisitionConstant.KAFKA_SERVERS, "csust", AcquisitionConstant.CS_WEATHER_TOPIC);
        try {
            while (true) {
                // poll blocks for up to 1 second waiting for the broker to return data.
                // (A stale comment here previously claimed a 100 ms timeout.)
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(1));
                // Each record carries its topic, partition, offset, and key/value pair.
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record);
                }
            }
        } finally {
            // Closing the consumer also closes its sockets and network connections,
            // and immediately triggers a group rebalance.
            consumer.close();
        }
    }
}
