package com.gitee.dufafei.spark.connector.kafka

import java.util.Properties
import java.util.concurrent.Future

import com.gitee.dufafei.spark.pattern.Logging
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord, RecordMetadata}

import scala.collection.JavaConversions._

/**
 * <dependency>
 * <groupId>org.apache.kafka</groupId>
 * <artifactId>kafka-clients</artifactId>
 * <version>${kafka.version}</version>
 * <exclusions>
 * <exclusion>
 * <groupId>net.jpountz.lz4</groupId>
 * <artifactId>lz4</artifactId>
 * </exclusion>
 * </exclusions>
 * </dependency>
 */
/**
 * Serializable wrapper around a [[KafkaProducer]] built from a factory function.
 *
 * The producer is created lazily on first use, so instances of this wrapper can
 * be serialized (e.g. shipped with a Spark closure — TODO confirm intended use)
 * without forcing producer construction on the serializing JVM.
 *
 * @param func factory that builds the underlying producer on first send
 */
class KafkaPrClient[K, V](func: () => KafkaProducer[K, V]) extends Serializable {

  // Deferred until the first send; `func` typically installs lifecycle hooks.
  lazy val producer: KafkaProducer[K, V] = func()

  /**
   * Sends a keyed record to `topic`.
   *
   * @return the broker acknowledgement as returned by `KafkaProducer.send`
   */
  def send(topic: String, key: K, value: V): Future[RecordMetadata] = {
    val record = new ProducerRecord[K, V](topic, key, value)
    producer.send(record)
  }

  /**
   * Sends an un-keyed record to `topic` (partition left to the producer's
   * configured partitioner).
   *
   * @return the broker acknowledgement as returned by `KafkaProducer.send`
   */
  def send(topic: String, value: V): Future[RecordMetadata] = {
    val record = new ProducerRecord[K, V](topic, value)
    producer.send(record)
  }
}

object KafkaPrClient extends Logging {

  // Common Kafka producer configuration keys, kept public for callers
  // assembling a config map.
  val BOOTSTRAP_SERVERS = "bootstrap.servers"
  val KEY_SERIALIZER = "key.serializer"
  val VALUE_SERIALIZER = "value.serializer"
  val DefaultSerializer = "org.apache.kafka.common.serialization.StringSerializer"

  /**
   * Builds a client whose underlying producer is created lazily from `config`.
   * A JVM shutdown hook is registered on first use to close the producer so
   * buffered records are flushed on exit.
   *
   * @param config Kafka producer configuration (key -> value)
   */
  def apply[K, V](config: Map[String, Object]): KafkaPrClient[K, V] = {
    val func = () => {
      // KafkaProducer expects a java.util.Map; convert explicitly instead of
      // relying on the deprecated implicit JavaConversions wildcard import.
      val javaConfig = new java.util.HashMap[String, Object]()
      config.foreach { case (k, v) => javaConfig.put(k, v) }
      val producer = new KafkaProducer[K, V](javaConfig)
      sys.addShutdownHook {
        // NOTE(review): `name` is presumably supplied by the Logging trait
        // (not visible in this file) — confirm it resolves.
        info(s"Execute hook thread: $name")
        producer.close()
      }
      producer
    }
    new KafkaPrClient[K, V](func)
  }

  /**
   * Convenience overload: converts a [[java.util.Properties]] to an immutable
   * Scala map and delegates to the map-based factory.
   */
  def apply[K, V](config: Properties): KafkaPrClient[K, V] = {
    // Explicit, non-deprecated conversion (JavaConverters, not JavaConversions).
    import scala.collection.JavaConverters._
    apply(config.asScala.toMap)
  }
}
