package com.shujia.flink.kafka

import java.util.Properties

import scala.util.control.NonFatal

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

object Demo3TS {

  /**
    * Demo: Kafka transactional producer.
    *
    * Sends two records to topic "ts" inside a single transaction, so a consumer
    * running with isolation-level read_committed sees either both records or
    * neither — never just the first one.
    */
  def main(args: Array[String]): Unit = {

    /**
      * Build the producer configuration.
      */
    val properties = new Properties
    // Kafka broker address list
    properties.setProperty("bootstrap.servers", "master:9092,node1:9092,node2:9092")
    // key and value serialization classes
    properties.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    properties.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    // transactional.id enables the transactional API; it should be unique per
    // logical producer instance so the broker can fence zombie producers
    properties.setProperty("transactional.id", "asdasdas")

    val producer = new KafkaProducer[String, String](properties)

    /**
      * Use Kafka transactions.
      */

    // Initialize transactions: registers the transactional.id with the broker
    // and aborts any in-flight transaction left by a previous incarnation.
    producer.initTransactions()

    try {
      // Open the transaction
      producer.beginTransaction()

      // Send the data
      producer.send(new ProducerRecord[String, String]("ts", "java"))
      println("发送第一条")

      Thread.sleep(5000)

      producer.send(new ProducerRecord[String, String]("ts", "saprk"))
      println("发送第二条")

      // Commit: atomically makes both records visible to read_committed
      // consumers. commitTransaction() flushes pending sends itself, so no
      // separate flush() call is needed.
      producer.commitTransaction()
    } catch {
      case NonFatal(e) =>
        // Abort so the open transaction does not hold back the last stable
        // offset and block read_committed consumers indefinitely.
        // NOTE(review): fatal producer errors (e.g. ProducerFencedException)
        // make abortTransaction() itself throw; for a demo this is acceptable.
        producer.abortTransaction()
        throw e
    } finally {
      // Always release the producer's network resources.
      producer.close()
    }

    /**
      * Verify with a console consumer:
      *
      * --isolation-level read_committed  reads only committed data
      *
      * kafka-console-consumer.sh --bootstrap-server  master:9092,node1:9092,node2:9092 --isolation-level read_committed   --from-beginning --topic ts
      *
      */

  }

}
