package com.example.review;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.errors.AuthorizationException;
import org.apache.kafka.common.errors.OutOfOrderSequenceException;
import org.apache.kafka.common.errors.ProducerFencedException;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;

/**
 * Created with IntelliJ IDEA.
 * ClassName: ProducerTransaction
 * Package: com.example.review
 * Description:
 * User: fzykd
 *
 * @Author: LQH
 * Date: 2023-08-05
 * Time: 14:08
 */

// Producer demo: exactly-once (no-duplicate) delivery via Kafka transactions.
public class ProducerTransaction {

    /**
     * Sends a single record to topic {@code first} inside a Kafka transaction.
     *
     * <p>Error handling follows the {@link KafkaProducer} javadoc contract:
     * fatal errors ({@link ProducerFencedException}, {@link OutOfOrderSequenceException},
     * {@link AuthorizationException}) must NOT be followed by {@code abortTransaction()} —
     * the producer can only be closed; any other {@link KafkaException} is abortable.
     */
    public static void main(String[] args) {
        Properties properties = new Properties();
        // Required connection and serialization settings.
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "hadoop102:9092");
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        // transactional.id is mandatory for transactions; any value works as long as it
        // is globally unique — a restarted producer with the same id fences its zombie.
        properties.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "and");

        // try-with-resources guarantees the producer is closed on every path,
        // including failures in initTransactions()/beginTransaction() that the
        // original code leaked on.
        try (KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(properties)) {
            // 1. Register this transactional.id with the coordinator (call once, first).
            kafkaProducer.initTransactions();
            try {
                // 2. Open the transaction, produce, then commit atomically.
                kafkaProducer.beginTransaction();
                kafkaProducer.send(new ProducerRecord<>("first", "KAFKA"));
                // 3. Commit the transaction.
                kafkaProducer.commitTransaction();
            } catch (ProducerFencedException | OutOfOrderSequenceException | AuthorizationException e) {
                // Fatal producer errors: aborting is not allowed — the producer must be
                // closed (done by try-with-resources). Rethrow so the failure is visible
                // instead of being silently swallowed.
                throw e;
            } catch (KafkaException e) {
                // Abortable error: roll the transaction back so no partial data commits.
                kafkaProducer.abortTransaction();
            }
        }
    }

}
