package com.unipay.oss;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

/**
 * @Project: Flink17_Jvm
 * @Author: Chenmy
 * @Time: 2023-10-20 10:57:31
 * @Desc: Relays records from Kafka topic "gamb_check_push_data" to topic "cmyTest".
 */

public class MyConsumer {

    /**
     * Bridges records between two Kafka topics: polls the source topic
     * {@code gamb_check_push_data} in an endless loop and republishes each
     * record's value to the target topic {@code cmyTest}.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        // Consumer configuration: string keys/values; group "0110" starts from
        // the earliest offset when it has no committed position.
        Properties consumerProps = new Properties();
        consumerProps.put("bootstrap.servers",
                "172.18.172.26:9092,172.18.172.27:9092,172.18.172.28:9092,172.18.172.29:9092,172.18.172.30:9092");
        consumerProps.put("group.id", "0110");
        consumerProps.put("auto.offset.reset", "earliest");
        consumerProps.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        consumerProps.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        // Producer configuration; acks=all waits for the full in-sync replica
        // set to acknowledge each send.
        // NOTE(review): producer and consumer point at DIFFERENT broker lists
        // (172.18.177.x vs 172.18.172.x) — confirm this cross-cluster copy is intentional.
        Properties producerProps = new Properties();
        producerProps.put("bootstrap.servers", "172.18.177.141:9092,172.18.177.143:9092,172.18.177.144:9092");
        producerProps.put("acks", "all");
        producerProps.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        producerProps.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // try-with-resources closes BOTH clients on exit. The previous version
        // closed only the consumer in its finally block, leaking the producer
        // (unflushed buffers and open network connections).
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(consumerProps);
             KafkaProducer<String, String> producer = new KafkaProducer<>(producerProps)) {
            consumer.subscribe(Collections.singletonList("gamb_check_push_data"));

            while (true) {
                // poll waits up to 100 ms for the broker to return data; each
                // record carries its topic, partition, offset and key/value pair.
                // poll(Duration) replaces the deprecated poll(long) overload.
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                System.out.println("---------------" + records.count() + "---------------");
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record);
                    // Forward only the value; key/headers of the source record are dropped.
                    producer.send(new ProducerRecord<>("cmyTest", record.value()));
                }
            }
        }
    }
}
