package com.leal.util

import com.leal.sink.KafkaSink
import org.apache.kafka.clients.producer.ProducerConfig
import org.apache.kafka.common.serialization.StringSerializer
import org.apache.spark.SparkContext
import org.apache.spark.broadcast.Broadcast

import java.util.Properties

/**
 * @Classname bigdata
 * @Description Kafka util
 * @Date 2023/2/28 12:43
 * @Created by leal
 */
object KafkaUtil {

  /**
   * Builds a Kafka producer wrapped in a [[KafkaSink]] and broadcasts it via
   * the given [[SparkContext]], so executors share one serializable sink
   * instead of constructing a producer per task.
   *
   * @param sc               active SparkContext used to perform the broadcast
   * @param bootstrapServers value for Kafka's `bootstrap.servers` setting
   * @return broadcast handle to a string-keyed, string-valued [[KafkaSink]]
   */
  def kafkaProducer(sc: SparkContext, bootstrapServers: String): Broadcast[KafkaSink[String, String]] = {
    // Producer settings kept as plain pairs, then folded into a Properties object.
    val settings: Seq[(String, String)] = Seq(
      ProducerConfig.BOOTSTRAP_SERVERS_CONFIG -> bootstrapServers,
      ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG -> classOf[StringSerializer].getName,
      ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG -> classOf[StringSerializer].getName,
      // acks=1: leader-only acknowledgement (lower latency, weaker durability)
      ProducerConfig.ACKS_CONFIG -> "1",
      ProducerConfig.RETRIES_CONFIG -> "3",
      ProducerConfig.BATCH_SIZE_CONFIG -> "16384",
      ProducerConfig.COMPRESSION_TYPE_CONFIG -> "lz4",
      ProducerConfig.BUFFER_MEMORY_CONFIG -> "33554432",
      ProducerConfig.LINGER_MS_CONFIG -> "1"
      //ProducerConfig.AUTO_INCLUDE_JMX_REPORTER_CONFIG -> "true"
    )

    val producerConfig = new Properties()
    settings.foreach { case (key, value) => producerConfig.put(key, value) }

    // Broadcast once; tasks on each executor reuse the same sink.
    sc.broadcast(KafkaSink[String, String](producerConfig))
  }
}
