package cn.doitedu.kafka.day02

import java.util.Properties

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer

/**
 * Shared Kafka producer utility.
 *
 * Builds a single String/String [[KafkaProducer]] at object-initialization time
 * and exposes [[writeDataToKafka]] for fire-and-forget writes.
 *
 * NOTE(review): `count` is an unsynchronized `var`, so this object is NOT
 * thread-safe if `writeDataToKafka` is called from multiple threads — confirm
 * single-threaded use or guard the counter.
 */
object KafkaUtils {

  /** Flush after this many sends. Kept at 1 to preserve the original
    * per-record flush behavior; raise it to let the producer batch. */
  private val FlushInterval = 1

  // Producer configuration.
  val properties = new Properties()
  // Kafka broker bootstrap addresses.
  properties.setProperty("bootstrap.servers", "node-1.51doit.cn:9092,node-2.51doit.cn:9092,node-3.51doit.cn:9092")
  // Key serializer (string form of the class name).
  properties.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
  // Value serializer (classOf form — both styles are equivalent).
  properties.setProperty("value.serializer", classOf[StringSerializer].getName)
  // acks=-1 (== "all"): wait for the full ISR to acknowledge each write.
  properties.setProperty("acks", "-1")
  // Max retries on a failed send.
  // FIX: key was misspelled "reties", so Kafka silently ignored it and the
  // retry setting never took effect.
  properties.setProperty("retries", "100")
  // Compress batches before sending.
  properties.setProperty("compression.type", "gzip")

  val producer = new KafkaProducer[String, String](properties)

  // Number of records sent so far; drives the periodic flush below.
  var count = 0

  /**
   * Sends one line to the given topic (no key, so the partitioner distributes
   * records across partitions) and flushes every [[FlushInterval]] records.
   *
   * NOTE(review): the Future returned by `send` is discarded, so delivery
   * failures are invisible to the caller — add a callback if delivery
   * guarantees matter.
   *
   * @param topic destination Kafka topic
   * @param line  record value
   */
  def writeDataToKafka(topic: String, line: String): Unit = {
    val record = new ProducerRecord[String, String](topic, line)
    producer.send(record)
    count += 1
    // FIX: original condition was `count % 1 == 0`, which is always true and
    // forced a blocking flush on every record, defeating batching/compression.
    // The interval is now a named constant (default 1 preserves behavior).
    if (count % FlushInterval == 0) {
      producer.flush()
    }
  }

}
