package org.niit.kafka;

import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;

import java.time.Duration;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.stream.Collectors;

/*
   Simulates a Kafka transaction. A transaction is an atomic unit: a group of
   operations that either all succeed or all fail.
   The consumer and the producer both live in this single program
   (consume-transform-produce, exactly-once pattern).
 */
public class TransactionProgram {

    public static void main(String[] args) {

        // Create the transactional producer and the read_committed consumer.
        KafkaProducer<String, String> producer = createProducer();
        KafkaConsumer<String, String> consumer = createConsumer();

        try {
            // Must be called exactly once, before the first beginTransaction().
            producer.initTransactions();

            // Poll forever: read from the old topic, transform each record, write
            // it to BD2_new_user, and commit the consumed offsets inside the SAME
            // transaction so reads and writes succeed or fail together.
            for (;;) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(2));

                // Nothing consumed -> nothing to produce; don't open an empty
                // transaction every 2 seconds.
                if (records.isEmpty()) {
                    continue;
                }

                producer.beginTransaction();
                try {
                    // Offsets to commit atomically with the produced records
                    // (auto-commit is disabled on the consumer).
                    Map<TopicPartition, OffsetAndMetadata> offsetMap = new HashMap<>();

                    for (ConsumerRecord<String, String> record : records) {
                        // offset + 1: the committed offset is the NEXT record to
                        // read, so a restart resumes after this one.
                        offsetMap.put(new TopicPartition(record.topic(), record.partition()),
                                new OffsetAndMetadata(record.offset() + 1));

                        // e.g. "张三,1,1994-01-01" -> "张三,男,1994-01-01"
                        String msg = transform(record.value());

                        producer.send(new ProducerRecord<>("BD2_new_user", msg));
                    }

                    // Commit the consumed offsets as part of this transaction.
                    // groupMetadata() keeps the group id consistent with the
                    // consumer config instead of repeating the literal "old_user"
                    // (and avoids the deprecated String overload).
                    producer.sendOffsetsToTransaction(offsetMap, consumer.groupMetadata());

                    producer.commitTransaction();
                } catch (Exception e) {
                    // A failure while a transaction is OPEN: abort so neither the
                    // produced records nor the offsets become visible, then
                    // propagate — the consumer's in-memory position has already
                    // advanced, so continuing here would silently skip the batch.
                    producer.abortTransaction();
                    throw e;
                }
            }

        } catch (Exception e) {
            // Failure outside an open transaction (initTransactions(), a fenced
            // producer, or a rethrown batch failure). Calling abortTransaction()
            // here would throw IllegalStateException when no transaction is
            // in progress, masking the original error — just report it.
            e.printStackTrace();
        } finally {
            // Release network resources on any exit path; the original leaked
            // both clients whenever an exception escaped the loop.
            producer.close();
            consumer.close();
        }

    }

    /**
     * Maps the numeric sex flag in the second CSV field to its label:
     * "1" -> "男", "0" -> "女". Rows with fewer than three fields, or an
     * unrecognized flag, pass through unchanged.
     */
    private static String transform(String msg) {
        // String.split never returns null, so only the length needs checking.
        String[] fields = msg.split(",");
        if (fields.length > 2) {
            if ("1".equals(fields[1])) {
                fields[1] = "男";
            } else if ("0".equals(fields[1])) {
                fields[1] = "女";
            }
        }
        return String.join(",", fields);
    }

    /**
     * Creates the consumer that reads the old topic {@code BD2_old_user}.
     * Auto-commit is disabled (offsets are committed via the producer
     * transaction) and the isolation level is read_committed so records from
     * aborted transactions are never seen.
     */
    private static KafkaConsumer<String, String> createConsumer() {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "node1:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "old_user");
        // Only read records from committed transactions.
        props.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed");
        // Offsets are committed through sendOffsetsToTransaction, not auto-commit.
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");

        // Deserialize the consumed bytes back to Strings.
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);

        consumer.subscribe(Arrays.asList("BD2_old_user"));

        return consumer;
    }

    /**
     * Creates the transactional producer that writes to the new topic.
     * A transactional.id is mandatory for transactions; acks=all is implied by
     * (and required for) idempotent/transactional delivery.
     */
    private static KafkaProducer<String, String> createProducer() {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "node1:9092");
        props.put(ProducerConfig.ACKS_CONFIG, "all");
        // Transactions require a (stable, unique per producer) transactional id.
        props.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "new_user");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        return new KafkaProducer<>(props);
    }

}
