package com.spark.util.client

import java.util.Properties
import java.util.concurrent.Future

import scala.collection.JavaConversions._
import scala.collection.JavaConverters._

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord, RecordMetadata}

import com.spark.util.core.Logging
import com.spark.util.utils.PropertiesUtil

/**
 * Serializable wrapper around a [[KafkaProducer]].
 *
 * The producer itself is not serializable, so instead of holding one directly
 * this class holds a factory function and builds the producer lazily on first
 * use — i.e. after the wrapper has been shipped to (and deserialized on) the
 * node that actually sends records.
 *
 * @param func factory that creates the underlying producer on first send
 * @tparam K key type of the records sent through this client
 * @tparam V value type of the records sent through this client
 */
class KafkaProducerClient[K,V](func:() => KafkaProducer[K,V]) extends Serializable {

  // Created on first access only; `lazy` defers the (non-serializable)
  // producer construction until after deserialization.
  lazy val producer: KafkaProducer[K,V] = func()

  /** Sends a keyed record to `topic`; returns the producer's async ack. */
  def send(topic:String,key:K,value:V): Future[RecordMetadata] = {
    val record = new ProducerRecord[K,V](topic, key, value)
    producer.send(record)
  }

  /** Sends an un-keyed record to `topic`; returns the producer's async ack. */
  def send(topic:String,value:V): Future[RecordMetadata] = {
    val record = new ProducerRecord[K,V](topic, value)
    producer.send(record)
  }
}

/**
 * Factory methods for [[KafkaProducerClient]].
 *
 * Each created client registers a JVM shutdown hook that closes its producer,
 * so buffered records are flushed on normal JVM exit.
 */
object KafkaProducerClient extends Logging {

  /**
   * Builds a client from a Scala config map (producer properties such as
   * `bootstrap.servers`, serializers, ...).
   *
   * @param config Kafka producer configuration
   * @return a serializable client that creates the producer lazily
   */
  def apply[K,V](config: Map[String, Object]): KafkaProducerClient[K,V] = {
    val func = () => {
      // Explicit .asJava instead of relying on the deprecated
      // scala.collection.JavaConversions implicit conversion.
      val producer = new KafkaProducer[K,V](config.asJava)
      // Close on JVM exit so buffered records are flushed.
      sys.addShutdownHook{
        info("Execute hook thread: KafkaProducerClient")
        producer.close()
      }
      producer
    }
    new KafkaProducerClient[K,V](func)
  }

  /**
   * Builds a client from Java [[Properties]]; converted explicitly via
   * JavaConverters (the old implicit JavaConversions route is deprecated).
   */
  def apply[K,V](config: Properties): KafkaProducerClient[K,V] =
    this.apply(config.asScala.toMap)
}
