package com.unicompayment.customkafka0_9.app;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import com.google.common.collect.Lists;

import java.util.Properties;

/**
 * @Project: customKafka0_9
 * @Author: Chenmy
 * @Time: 2023-11-29 14:19:59
 * @Desc: Mirrors messages from a source Kafka topic to a target topic:
 *        polls the source cluster in an endless loop and republishes every
 *        record to the configured target cluster/topic.
 */
@Component
public class MyCustomerKafka {

    // --- Source (consumer) settings, injected from application properties ---
    @Value("${kafka.source.bootstrap.servers}")
    private String sourceBootstrapServers;
    @Value("${kafka.source.topic}")
    private String sourceTopic;
    @Value("${kafka.source.group.id}")
    private String sourceGroupId;
    @Value("${kafka.source.auto.offset.reset}")
    private String sourceAutoOffsetReset;

    // --- Target (producer) settings ---
    @Value("${kafka.target.bootstrap.servers}")
    private String targetBootstrapServers;
    @Value("${kafka.target.topic}")
    private String targetTopic;
    @Value("${kafka.target.acks}")
    private String targetAcks;

    /**
     * Continuously polls the source topic and forwards every record to the
     * target topic. This method blocks forever under normal operation; if the
     * poll/forward loop exits with an exception, both clients are closed so
     * no network resources leak.
     */
    public void start() {
        // Print usage so operators know which properties must be supplied.
        System.out.println("java -jar xxx.jar" +
                "\n" +
                "--kafka.source.bootstrap.servers=source-bootstrap.servers\n" +
                "--kafka.source.topic=source-topic\n" +
                "--kafka.source.group.id=source-group.id\n" +
                "--kafka.source.auto.offset.reset=source-auto.offset.reset\n" +
                "\n" +
                "--kafka.target.bootstrap.servers=target-bootstrap.servers\n" +
                "--kafka.target.topic=target-topic\n" +
                "--kafka.target.acks=target-acks\n");

        // Consumer configuration for the source cluster.
        Properties props = new Properties();
        props.put("bootstrap.servers", sourceBootstrapServers);
        props.put("group.id", sourceGroupId);
        props.put("auto.offset.reset", sourceAutoOffsetReset);
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Lists.newArrayList(sourceTopic));

        // Producer configuration for the target cluster.
        Properties producerProps = new Properties();
        producerProps.put("bootstrap.servers", targetBootstrapServers);
        producerProps.put("acks", targetAcks);
        producerProps.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        producerProps.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        Producer<String, String> producer = new KafkaProducer<>(producerProps);

        try {
            while (true) {
                // poll blocks for up to 1000 ms waiting for the broker to
                // return records; each record carries topic, partition,
                // offset and its key/value pair.
                ConsumerRecords<String, String> records = consumer.poll(1000);
                System.out.println("---------------" + records.count() + "---------------");
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record);
                    // Forward the key along with the value so the target
                    // cluster preserves the source's key-based partitioning
                    // (the original dropped the key, scattering records).
                    producer.send(new ProducerRecord<>(targetTopic, record.key(), record.value()));
                }
            }
        } finally {
            // Only reached if poll/send throws — release sockets and buffers
            // instead of leaking them.
            consumer.close();
            producer.close();
        }
    }
}
