package cn.doitedu.kafka.transaction;

import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;

import java.time.Duration;
import java.util.*;

/***
 * @author hunter.d
 * @qq 657270652
 * @wx haitao-duan
 * @date 2020/11/15
 **/
/**
 * Demonstrates Kafka's transactional consume-transform-produce pattern:
 * read from {@code tpc_5}, uppercase each value, write to {@code tpc_sink},
 * and commit the consumer offsets atomically with the produced records.
 */
public class TransactionDemo {

    /** Consumer group id; must be the SAME value passed to sendOffsetsToTransaction. */
    private static final String GROUP_ID = "groupid03";

    public static void main(String[] args) {

        Properties props_p = new Properties();
        props_p.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "doitedu01:9092,doitedu02:9092");
        props_p.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props_p.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        // A stable transactional.id enables idempotence and zombie fencing across restarts.
        props_p.setProperty(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "tranction_id_003");

        Properties props_c = new Properties();
        props_c.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props_c.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props_c.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "doitedu01:9092,doitedu02:9092");
        props_c.put(ConsumerConfig.GROUP_ID_CONFIG, GROUP_ID);
        props_c.put(ConsumerConfig.CLIENT_ID_CONFIG, "clientid");
        props_c.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        // Offsets must ONLY be committed through the producer transaction; auto-commit
        // (default true) would commit them outside the transaction and break exactly-once.
        props_c.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        // Do not read records from aborted transactions.
        props_c.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed");

        // Build the producer and consumer, subscribe to the input topic.
        KafkaProducer<String, String> p = new KafkaProducer<String, String>(props_p);
        KafkaConsumer<String, String> c = new KafkaConsumer<String, String>(props_c);
        c.subscribe(Collections.singletonList("tpc_5"));

        // Register the transactional.id with the coordinator and fence zombies.
        p.initTransactions();

        // consume-transform-produce loop: one transaction per non-empty poll batch.
        while (true) {
            ConsumerRecords<String, String> records = c.poll(Duration.ofMillis(1000L));
            if (!records.isEmpty()) {
                // Records the "partition -> next offset to consume" pairs for this batch.
                HashMap<TopicPartition, OffsetAndMetadata> offsetsMap = new HashMap<>();
                p.beginTransaction();
                try {
                    // Process the batch partition by partition so we can track the
                    // last consumed offset of each partition.
                    Set<TopicPartition> partitions = records.partitions();
                    for (TopicPartition partition : partitions) {
                        List<ConsumerRecord<String, String>> partitionRecords = records.records(partition);
                        for (ConsumerRecord<String, String> record : partitionRecords) {
                            // Business logic: uppercase the value and forward it.
                            System.out.println(record.topic() + "," + record.key() + "," + record.value());
                            ProducerRecord<String, String> outRecord = new ProducerRecord<>("tpc_sink", record.key(), record.value().toUpperCase());
                            p.send(outRecord);
                        }

                        // Committed offset is "last processed + 1" (the next offset to read).
                        long offset = partitionRecords.get(partitionRecords.size() - 1).offset();
                        offsetsMap.put(partition, new OffsetAndMetadata(offset + 1));
                    }

                    // Commit the consumed offsets as part of the transaction.
                    // NOTE: the group id here MUST match the consumer's group.id,
                    // otherwise the real group never advances and data is reprocessed.
                    p.sendOffsetsToTransaction(offsetsMap, GROUP_ID);
                    p.commitTransaction();
                } catch (Exception e) {
                    // Abort so neither the output records nor the offsets become visible;
                    // the batch will be re-polled and reprocessed.
                    System.err.println("Transaction aborted, batch will be reprocessed: " + e);
                    p.abortTransaction();
                }
            }
        }
    }
}
