package com.unipay.task;

import com.google.common.collect.Lists;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

/**
 * @Project: Flink17_Jvm
 * @Author: Chenmy
 * @Time: 2023-10-20 10:57:31
 * @Desc: Mirrors records from the gamb_check_push_data topic on a source Kafka cluster to the same-named topic on a target cluster.
 */

public class MyConsumer {

    /** Topic read from the source cluster and written to the target cluster. */
    private static final String TOPIC = "gamb_check_push_data";

    /** How long each poll blocks waiting for the brokers to return records. */
    private static final Duration POLL_TIMEOUT = Duration.ofMillis(100);

    /**
     * Consumes records from {@link #TOPIC} on the source Kafka cluster and
     * re-publishes each record's value to the same-named topic on the target
     * cluster, printing every record as it is forwarded. Runs until the
     * process is terminated.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        // --- Consumer configuration (source cluster) ---
        Properties consumerProps = new Properties();
        // NOTE(review): the legacy "zookeeper.connect" property present in the
        // original was dropped — the new-style consumer only reads
        // bootstrap.servers and would ignore it with a warning.
        consumerProps.put("bootstrap.servers", "172.18.172.26:9092,172.18.172.27:9092,172.18.172.28:9092,172.18.172.29:9092,172.18.172.30:9092");
        consumerProps.put("group.id", "my-group01");
        // Start from the earliest offset when the group has no committed offset yet.
        consumerProps.put("auto.offset.reset", "earliest");
        consumerProps.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        consumerProps.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        // --- Producer configuration (target cluster) ---
        Properties producerProps = new Properties();
        producerProps.put("bootstrap.servers", "172.18.177.141:9092,172.18.177.143:9092,172.18.177.144:9092");
        // Wait for the full ISR to acknowledge before a send is considered successful.
        producerProps.put("acks", "all");
        producerProps.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        producerProps.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // try-with-resources guarantees both clients are closed (flushing the
        // producer's buffered records) if the loop ever exits via an exception;
        // the original never closed either client.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(consumerProps);
             Producer<String, String> producer = new KafkaProducer<>(producerProps)) {
            consumer.subscribe(Collections.singletonList(TOPIC));

            while (true) {
                // poll returns every record fetched within the timeout; each record
                // carries its topic, partition, offset, and key/value pair.
                // (Duration overload — poll(long) is deprecated since Kafka 2.0.)
                ConsumerRecords<String, String> records = consumer.poll(POLL_TIMEOUT);
                System.out.println("---------------" + records.count() + "---------------");
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record);
                    // Forward the value to the target cluster. The callback surfaces
                    // send failures that the original fire-and-forget call swallowed.
                    producer.send(new ProducerRecord<>(TOPIC, record.value()),
                            (metadata, exception) -> {
                                if (exception != null) {
                                    System.err.println("Failed to forward record at source offset "
                                            + record.offset() + ": " + exception);
                                }
                            });
                }
                // NOTE(review): enable.auto.commit defaults to true, so source offsets
                // may be committed before the producer confirms delivery — records can
                // be lost on a crash. Consider manual commits after successful sends.
            }
        }
    }
}
