package com.zhao.algorithm.code

import java.util.Properties

import com.typesafe.config.{Config, ConfigFactory}
import org.apache.kafka.common.serialization.StringSerializer
import org.apache.log4j.Logger
import org.apache.spark.SparkContext
import org.apache.spark.broadcast.Broadcast

/**
 * Description: Broadcast singleton wrapper for a Kafka producer sink. <br/>
 * Copyright (c) 2020, 赵 <br/>
 * This program is protected by copyright laws. <br/>
 * Date: 2020/11/23 11:42
 *
 * @author 柒柒
 * @version 1.0
 */

object CustomKafkaProducerSingle {
  val load: Config = ConfigFactory.load("application.properties")

  // Stable logger for this object (previously recreated inside the init path on
  // every first-time initialization).
  private val log: Logger = Logger.getLogger(CustomKafkaProducerSingle.getClass)

  // Lazily-initialized broadcast of the Kafka sink. @volatile is required for
  // the double-checked locking in getInstance to be safe across threads.
  @volatile private var instance: Broadcast[CustomKafkaSink[String, String]] = null

  /**
   * Returns the broadcast Kafka producer sink, creating and broadcasting it on
   * the first call. Double-checked locking guarantees the sink is built and
   * broadcast exactly once even under concurrent callers.
   *
   * @param sc SparkContext used to broadcast the sink on first initialization
   * @return broadcast wrapper around a String/String Kafka producer sink
   */
  def getInstance(sc: SparkContext): Broadcast[CustomKafkaSink[String, String]] = {
    if (instance == null) {
      synchronized {
        if (instance == null) {
          val kafkaProducerConfig: Properties = {
            // Build the producer configuration.
            val props: Properties = new Properties()
            // Broker list.
            props.setProperty("bootstrap.servers", load.getString("algorithm.kafka.producer.bootstrap.servers"))
            // NOTE: do not set client.id here — a fixed client id breaks
            // parallel use; by default it is derived per thread.
            //props.setProperty("client.id",load.getString("algorithm.kafka.producer.client.id"))
            // Key/value serializer classes.
            props.setProperty("key.serializer", classOf[StringSerializer].getName)
            props.setProperty("value.serializer", classOf[StringSerializer].getName)
            props
          }

          // Broadcast the producer sink to the executors.
          instance = sc.broadcast(CustomKafkaSink[String, String](kafkaProducerConfig))
          log.warn("kafka producer init done!")
          // (removed a trailing dead `instance` expression whose value was
          // silently discarded by the enclosing if/synchronized blocks)
        }
      }
    }
    instance
  }
}
