package com.niit.kafka;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;

import java.time.Duration;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class TransactionProgram {

    public static void main(String[] args) {
        // 1. Create the consumer (reads BD1_old_user) and the transactional
        //    producer (writes BD1_new_user).
        KafkaConsumer<String, String> consumer = createConsumer();
        KafkaProducer<String, String> producer = createProducer();

        try {
            // 2. Register the transactional.id with the broker and fence off any
            //    previous producer instance with the same id.
            producer.initTransactions();

            // 3. Poll-transform-produce loop: each iteration is ONE Kafka transaction
            //    that atomically covers the produced records AND the consumed offsets.
            while (true) {
                producer.beginTransaction();

                // Offsets are committed manually through the transaction
                // (enable.auto.commit=false), so collect the next-to-read
                // position for every partition touched in this batch.
                Map<TopicPartition, OffsetAndMetadata> offsetMap = new HashMap<>();

                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(2));

                for (ConsumerRecord<String, String> data : records) {
                    String topic = data.topic();
                    long offset = data.offset();
                    int partition = data.partition();
                    // offset + 1: the committed offset is the position of the NEXT
                    // record to consume, so a restart resumes after this record.
                    offsetMap.put(new TopicPartition(topic, partition), new OffsetAndMetadata(offset + 1));

                    // Input format: "name,sex,birthday", e.g. "张三,1,1980-10-11"
                    // where sex 1 = male, 0 = female.
                    String msg = data.value();
                    String[] fieldArray = msg == null ? null : msg.split(",");
                    if (fieldArray != null && fieldArray.length > 2) {
                        String sex = fieldArray[1];
                        if (sex.equals("1")) {
                            fieldArray[1] = "男";
                        } else if (sex.equals("0")) {
                            fieldArray[1] = "女";
                        }
                        // Re-assemble ONLY for well-formed records. Previously this
                        // concatenation ran unconditionally and a record with fewer
                        // than 3 fields threw ArrayIndexOutOfBoundsException, aborting
                        // the whole transaction. Malformed records are now forwarded
                        // unchanged instead.
                        msg = fieldArray[0] + "," + fieldArray[1] + "," + fieldArray[2];
                    }

                    // Send the (possibly transformed) record to the output topic;
                    // it stays invisible to read_committed consumers until commit.
                    producer.send(new ProducerRecord<>("BD1_new_user", msg));
                }

                // Attach the consumed offsets to the transaction. "old_user" must
                // match the consumer's group.id so the offsets land in the correct
                // consumer group.
                producer.sendOffsetsToTransaction(offsetMap, "old_user");

                // Atomically commit the produced records and the offsets.
                producer.commitTransaction();
            }

        } catch (Exception e) {
            e.printStackTrace();
            // Abort the in-flight transaction so its records are never exposed to
            // read_committed consumers. abortTransaction can itself throw (e.g. if
            // the failure occurred before beginTransaction, or the producer was
            // fenced), so guard it to keep the original failure visible.
            try {
                producer.abortTransaction();
            } catch (Exception abortFailure) {
                abortFailure.printStackTrace();
            }
        } finally {
            // Release broker connections and buffers; previously these were leaked.
            consumer.close();
            producer.close();
        }
    }

    // 1. Create the consumer that pulls raw data from BD1_old_user.
    private static KafkaConsumer<String, String> createConsumer() {
        Properties props = new Properties();

        props.put("bootstrap.servers", "node1:9092");
        props.put("group.id", "old_user");
        // Only read records from committed transactions (end-to-end exactly-once).
        props.put("isolation.level", "read_committed");
        // Offsets are committed via producer.sendOffsetsToTransaction, never automatically.
        props.put("enable.auto.commit", "false");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);

        consumer.subscribe(Arrays.asList("BD1_old_user"));

        return consumer;
    }

    // 2. Create the producer that writes transformed data to BD1_new_user.
    private static KafkaProducer<String, String> createProducer() {

        Properties props = new Properties();
        props.put("bootstrap.servers", "node1:9092");
        // "all": wait for the full in-sync replica set to acknowledge
        // (required for the idempotent/transactional producer).
        props.put("acks", "all");
        // A stable transactional.id is mandatory to use the transactions API.
        props.put("transactional.id", "new_user");
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        return new KafkaProducer<>(props);
    }
}
