package com.galeno.demo;

import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.protocol.types.Field;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;


import java.time.Duration;
import java.util.*;

/**
 * Transactional consume-transform-produce demo: reads from topic "liu",
 * upper-cases each value, writes to topic "tpc6", committing consumed
 * offsets inside the producer transaction (exactly-once semantics).
 *
 * @author galeno
 * @date 2021/9/12 16:26
 */
public class ProductorAPI {

    /**
     * Transactional consume-transform-produce loop (exactly-once pattern):
     * polls records from topic "liu", upper-cases each value, writes the
     * result to topic "tpc6", and commits the consumed offsets inside the
     * same producer transaction so that output records and offset commits
     * are atomic.
     *
     * @param args unused
     */
    public static void main(String[] args) {

        Properties producerProps = new Properties();
        String serverUrl = "galeno01:9092,galeno02:9092,galeno03:9092";

        producerProps.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, serverUrl);
        producerProps.setProperty(ProducerConfig.ACKS_CONFIG, "all");
        producerProps.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        producerProps.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        // A transactional.id is mandatory for a transactional producer.
        producerProps.setProperty(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "t01");
        // Idempotence is required by (and implied by) transactions.
        producerProps.setProperty(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true");

        Properties consumerProps = new Properties();
        consumerProps.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, serverUrl);
        consumerProps.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "gp1");
        consumerProps.setProperty(ConsumerConfig.CLIENT_ID_CONFIG, "c1");
        consumerProps.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        consumerProps.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        // auto.offset.reset semantics:
        //   earliest - resume from the committed offset; if none, start from the beginning
        //   latest   - resume from the committed offset; if none, consume only newly produced data
        //   none     - throw if any assigned partition has no committed offset
        consumerProps.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        consumerProps.setProperty(ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG, RangeAssignor.class.getName());
        // BUGFIX: enable.auto.commit defaults to true, which would commit offsets
        // outside the transaction and break exactly-once. Offsets are committed
        // exclusively via sendOffsetsToTransaction below.
        consumerProps.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        // Only consume records from committed transactions.
        consumerProps.setProperty(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed");

        // BUGFIX: the original never closed either client; try-with-resources
        // guarantees both are closed (and buffered sends flushed) on exit.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(producerProps);
             KafkaConsumer<String, String> consumer = new KafkaConsumer<>(consumerProps)) {

            consumer.subscribe(Collections.singletonList("liu"));
            // Register the transactional.id with the broker and fence any
            // previous incarnation of this producer.
            producer.initTransactions();

            boolean running = true;
            while (running) {
                // BUGFIX: was Duration.ofMinutes(5000) — a ~3.5-day poll timeout;
                // 5000 ms was clearly intended.
                // BUGFIX: poll BEFORE beginTransaction, so an empty poll does not
                // leave a dangling open transaction (the original began the
                // transaction first and then broke out of the loop on empty).
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(5000));
                if (records.isEmpty()) {
                    break;
                }

                try {
                    producer.beginTransaction();

                    // Per partition, the offset to commit = last processed offset + 1.
                    Map<TopicPartition, OffsetAndMetadata> offsetsToCommit = new HashMap<>();

                    for (TopicPartition partition : records.partitions()) {
                        List<ConsumerRecord<String, String>> partitionRecords = records.records(partition);
                        for (ConsumerRecord<String, String> record : partitionRecords) {
                            // Transform: upper-case the value, preserve the key.
                            producer.send(new ProducerRecord<>("tpc6", record.key(), record.value().toUpperCase()));
                        }
                        long nextOffset = partitionRecords.get(partitionRecords.size() - 1).offset() + 1;
                        offsetsToCommit.put(partition, new OffsetAndMetadata(nextOffset));
                    }

                    // Commit the consumed offsets atomically with the produced records.
                    // groupMetadata() replaces the String group-id overload, which is
                    // deprecated since Kafka 2.5 and removed in 4.0.
                    producer.sendOffsetsToTransaction(offsetsToCommit, consumer.groupMetadata());
                    producer.commitTransaction();
                } catch (Exception e) {
                    // BUGFIX: the original swallowed the exception silently and left
                    // the transaction open. Abort so the batch is redelivered, then
                    // stop — a fenced/failed transactional producer cannot continue.
                    e.printStackTrace();
                    producer.abortTransaction();
                    running = false;
                }
            }
        }
    }

}
