package org.niit.kafka;

import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;

import java.time.Duration;
import java.util.*;
import java.util.stream.Collectors;

public class TransactionProgram {

    /**
     * Consume-transform-produce pipeline using Kafka transactions.
     *
     * <p>Reads CSV user records from the source topic ("BD1_old_user"), rewrites the
     * numeric gender field to its text form, and writes the result to the sink topic
     * ("BD1_new_user"). The consumed offsets are committed inside the same producer
     * transaction as the produced records, so each batch either fully succeeds or is
     * rolled back and re-read — exactly-once for downstream read_committed consumers.
     */
    public static void main(String[] args) {
        // 1. Build the consumer (source topic) and the transactional producer (sink topic).
        KafkaConsumer<String, String> consumer = createConsumer();
        KafkaProducer<String, String> producer = createProducer();

        try {
            // 2. Must be called exactly once, before the first beginTransaction().
            producer.initTransactions();

            // 3. Poll forever; each non-empty batch is handled in its own transaction.
            while (true) {
                // Poll BEFORE opening the transaction: the original opened it first,
                // which held an open transaction while blocked in poll() and committed
                // empty transactions on every idle cycle.
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(2));
                if (records.isEmpty()) {
                    continue;
                }

                // 4. One transaction per batch.
                producer.beginTransaction();
                try {
                    // 5. Offsets to commit as part of the transaction — the consumer has
                    //    auto-commit disabled, so this is the only commit path.
                    Map<TopicPartition, OffsetAndMetadata> offsetMap = new HashMap<>();

                    // 6./7. Transform each record and send it to the new topic.
                    for (ConsumerRecord<String, String> data : records) {
                        // offset + 1: after a restart, consumption resumes at the record
                        // FOLLOWING the last one processed here.
                        offsetMap.put(new TopicPartition(data.topic(), data.partition()),
                                new OffsetAndMetadata(data.offset() + 1));

                        // 8./9. e.g. "张三,1,1980-10-09" -> "张三,男,1980-10-09"
                        producer.send(new ProducerRecord<>("BD1_new_user", transform(data.value())));
                    }

                    // 10. Commit the consumed offsets atomically with the produced records.
                    //     "old_user" must match the consumer's group.id.
                    //     NOTE(review): this String overload is deprecated since Kafka 2.5 —
                    //     prefer sendOffsetsToTransaction(offsetMap, consumer.groupMetadata())
                    //     if the client version in use supports it; confirm before switching.
                    producer.sendOffsetsToTransaction(offsetMap, "old_user");

                    // 11. Commit the transaction.
                    producer.commitTransaction();
                } catch (Exception e) {
                    e.printStackTrace();
                    // 12. Roll back both the produced records and the offset commit; none of
                    //     this batch becomes visible to read_committed readers. Aborting here
                    //     (inside the batch scope) guarantees a transaction is actually open —
                    //     the original could call abortTransaction() with none in progress.
                    producer.abortTransaction();
                    return; // stop on failure, as the original did (finally still closes clients)
                }
            }
        } catch (Exception e) {
            // Failure outside any open transaction (e.g. initTransactions() or poll()).
            e.printStackTrace();
        } finally {
            // Fix: the original leaked both clients — always release their network
            // connections, buffers and background threads.
            producer.close();
            consumer.close();
        }
    }

    /**
     * Rewrites the gender column of a CSV user record: "1" -> "男", "0" -> "女".
     * Records with fewer than three fields are re-joined unchanged.
     *
     * @param value raw record, e.g. "张三,1,1980-10-09"
     * @return transformed record, e.g. "张三,男,1980-10-09"
     */
    static String transform(String value) {
        String[] fields = value.split(","); // split never returns null — no null check needed
        if (fields.length > 2) {
            if ("1".equals(fields[1])) {
                fields[1] = "男";
            } else if ("0".equals(fields[1])) {
                fields[1] = "女";
            }
        }
        return String.join(",", fields);
    }

    /**
     * Creates the consumer that reads the source topic "BD1_old_user".
     * Auto-commit is disabled (offsets are committed through the producer
     * transaction) and only committed transactional records are visible.
     */
    private static KafkaConsumer<String, String> createConsumer() {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "node1:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "old_user");
        // Only see messages from committed transactions.
        props.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed");
        // Offsets are committed via sendOffsetsToTransaction(), never automatically.
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singletonList("BD1_old_user"));
        return consumer;
    }

    /**
     * Creates the transactional producer that writes the sink topic "BD1_new_user".
     */
    private static KafkaProducer<String, String> createProducer() {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "node1:9092");
        props.put(ProducerConfig.ACKS_CONFIG, "all");
        // A transactional.id is mandatory for initTransactions()/beginTransaction().
        props.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "new_user");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        return new KafkaProducer<>(props);
    }
}
