package com.sam.kafka.transaction;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;

import java.time.Duration;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

/**
 * @author: wm.xue
 * @date: 2021/11/15 9:46
 * @description: consumer - conversion - producer
 */
@Slf4j
public class _02_Consumer_Transaction {


    /**
     * Builds the transactional producer that forwards converted records.
     *
     * <p>The producer is configured with a fixed {@code transactional.id} so the
     * broker can fence zombie instances, and with idempotence enabled (required
     * for the transactional API).
     *
     * @return a new transactional {@link KafkaProducer} with String key/value serializers
     */
    public static KafkaProducer<String, String> getProducer(){
        Map<String, Object> producerConfig = new HashMap<>();

        producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "10.100.17.117:9092");
        producerConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        producerConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);

        // Wait for acknowledgement from all in-sync replicas before considering a send complete.
        producerConfig.put(ProducerConfig.ACKS_CONFIG, "all");

        // Client id used in broker logs and metrics.
        producerConfig.put(ProducerConfig.CLIENT_ID_CONFIG, "_02_Consumer_Transaction_getProducer");

        // A transactional id enables initTransactions()/beginTransaction()/commitTransaction().
        producerConfig.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "tx_02_Consumer_Transaction_getProducer");

        // Exactly-once delivery per partition; mandatory when a transactional id is set.
        producerConfig.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, true);

        return new KafkaProducer<>(producerConfig);
    }

    /**
     * Builds the read-committed consumer that feeds the transaction.
     *
     * <p>Auto-commit is disabled because offsets are committed through
     * {@code producer.sendOffsetsToTransaction(...)} as part of the transaction,
     * and the isolation level hides records from aborted transactions.
     *
     * @return a new {@link KafkaConsumer} with String key/value deserializers
     */
    public static KafkaConsumer<String,String> getConsumer() {
        Map<String, Object> consumerConfig = new HashMap<>();

        consumerConfig.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "10.100.17.117:9092");
        consumerConfig.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        consumerConfig.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);

        // Client id used in broker logs and metrics.
        consumerConfig.put(ConsumerConfig.CLIENT_ID_CONFIG,"_02_Consumer_Transaction_getConsumer");

        // Consumer group id; also passed to sendOffsetsToTransaction by the caller.
        consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG, "_02_Consumer_Transaction_getConsumer");

        // Start from the earliest offset when no committed offset exists.
        consumerConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

        // Offsets are committed via the producer transaction — never auto/manually here.
        consumerConfig.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);

        // Only read records from committed transactions.
        consumerConfig.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed");

        return new KafkaConsumer<>(consumerConfig);
    }



    /**
     * Consume-transform-produce demo wrapped in a single Kafka transaction:
     * reads one batch from {@code tp_tx_01}, forwards each record to
     * {@code tp_tx_02}, and commits the consumer offsets atomically with the
     * produced records via {@code sendOffsetsToTransaction}.
     *
     * <p>Fixes over the original: the log statement now has a {@code {}}
     * placeholder (previously the record was silently dropped from the ERROR
     * message), the abort path logs the failure instead of swallowing it, and
     * both clients are closed in a {@code finally} block (previously leaked).
     */
    public static void main(String[] args) {

        KafkaProducer<String, String> producer = getProducer();
        KafkaConsumer<String, String> consumer = getConsumer();

        try {
            // Register the transactional.id with the coordinator and fence zombie producers.
            producer.initTransactions();

            // 订阅主题 (subscribe to the source topic)
            consumer.subscribe(Arrays.asList("tp_tx_01"));

            ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(3));

            // 开启事务 (begin the transaction)
            producer.beginTransaction();
            try {
                Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
                for (ConsumerRecord<String, String> record : records) {
                    // BUG FIX: original called log.error("开始处理消息：", record) with no
                    // placeholder, so the record was never rendered; also INFO is the
                    // appropriate level for normal processing.
                    log.info("开始处理消息：{}", record);
                    producer.send(new ProducerRecord<>("tp_tx_02", record.key(), record.value()));
                    // The committed offset must point at the NEXT record to consume.
                    offsets.put(new TopicPartition(record.topic(), record.partition()),
                            new OffsetAndMetadata(record.offset() + 1));
                }

                // Commit the consumer offsets as part of the producer transaction so
                // offsets and produced records commit or abort together.
                producer.sendOffsetsToTransaction(offsets, "_02_Consumer_Transaction_getConsumer");

                // 提交事务 (commit the transaction)
                producer.commitTransaction();
            } catch (Exception e) {
                // BUG FIX: original aborted silently; log the cause before aborting so
                // failures are diagnosable.
                log.error("transaction failed, aborting", e);
                producer.abortTransaction();
            }
        } finally {
            // BUG FIX: original leaked both clients (close calls were commented out).
            producer.close();
            consumer.close();
        }
    }

}
