package com.lagou.kafka.demo;

import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.AuthorizationException;
import org.apache.kafka.common.errors.OutOfOrderSequenceException;
import org.apache.kafka.common.errors.ProducerFencedException;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;

import java.time.Duration;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * Demonstrates Kafka transactions with an idempotent producer:
 * consume-transform-produce where the consumed offsets are committed
 * atomically with the produced records (exactly-once semantics).
 *
 * @author oyz
 * @version 1.0.4
 * @date 2023/1/10 21:44
 */
public class MyTransactional {

    /**
     * Builds a transactional, idempotent {@code String/String} producer.
     * <p>
     * The fixed {@code transactional.id} lets the broker fence off zombie
     * instances of this producer; enabling idempotence and {@code acks=all}
     * is required for transactional delivery.
     *
     * @return a new transactional {@link KafkaProducer}
     */
    public static KafkaProducer<String, String> getProducer() {
        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.108.130:9092");
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        properties.put(ProducerConfig.CLIENT_ID_CONFIG, "tx_producer");
        // Stable transactional id: identifies this logical producer across restarts.
        properties.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "tx_producer_id");
        properties.put(ProducerConfig.ACKS_CONFIG, "all");
        // Idempotence is mandatory for transactional producers.
        properties.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, true);
        return new KafkaProducer<>(properties);
    }

    /**
     * Builds a {@code String/String} consumer for the given group.
     * <p>
     * Auto-commit is disabled because offsets are committed through the
     * producer's transaction ({@code sendOffsetsToTransaction}), and
     * {@code read_committed} hides records from aborted transactions.
     *
     * @param consumerGroupId consumer group id the offsets are committed under
     * @return a new {@link KafkaConsumer}
     */
    public static KafkaConsumer<String, String> getConsumer(String consumerGroupId) {
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.108.130:9092");
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        properties.put(ConsumerConfig.CLIENT_ID_CONFIG, "tx_consumer");
        // Start from the earliest offset when the group has no committed position.
        properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, consumerGroupId);
        // Offsets are committed inside the producer transaction, never automatically.
        properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        // Only see records from committed transactions (ignore aborted ones).
        properties.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed");
        return new KafkaConsumer<>(properties);
    }

    /**
     * Consume-transform-produce loop (single pass): reads from topic
     * {@code kingdom}, forwards to topic {@code shenzhen}, and commits the
     * consumed offsets as part of the same transaction. A simulated failure
     * ({@code 1/0}) demonstrates that the transaction — records AND offsets —
     * is rolled back together.
     */
    public static void main(String[] args) {
        String consumerGroupId = "oyz_yyds";
        KafkaProducer<String, String> producer = getProducer();
        KafkaConsumer<String, String> consumer = getConsumer(consumerGroupId);

        // Registers the transactional id with the broker, fencing any previous
        // producer instance and completing/aborting its pending transaction.
        producer.initTransactions();
        consumer.subscribe(Collections.singleton("kingdom"));

        // poll(long) is deprecated; use the Duration overload.
        ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));

        try {
            producer.beginTransaction();

            Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
            for (ConsumerRecord<String, String> record : records) {
                // No send callback: an async send failure will surface as an
                // exception from commitTransaction() and take the abort path.
                // (The original code built `offsets` inside the callback, which
                // runs on the producer I/O thread — a race against the
                // sendOffsetsToTransaction call below; collect synchronously.)
                producer.send(new ProducerRecord<>("shenzhen", record.key(), record.value()));
                // The committed offset is the NEXT record to consume, hence +1.
                offsets.put(new TopicPartition(record.topic(), record.partition()),
                        new OffsetAndMetadata(record.offset() + 1));
            }

            // Commit the consumed offsets as part of the transaction: they
            // become visible to the group only if the transaction commits.
            producer.sendOffsetsToTransaction(offsets, consumerGroupId);

            // Simulated failure to demonstrate the rollback path.
            int i = 1 / 0;

            producer.commitTransaction();
        } catch (ProducerFencedException | OutOfOrderSequenceException | AuthorizationException e) {
            // Fatal producer errors: abortTransaction() is NOT allowed here;
            // the only safe action is to close the producer (done in finally).
            e.printStackTrace();
        } catch (Exception e) {
            // Any other failure (including the simulated ArithmeticException):
            // abort so neither the forwarded records nor the offsets commit.
            e.printStackTrace();
            producer.abortTransaction();
        } finally {
            producer.close();
            consumer.close();
        }
    }
}
