package com.daidai.util

import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010._

import java.util.Properties
import scala.collection.mutable


object KafkaUtil {

  /**
   * Builds a fresh Kafka consumer parameter map for the given consumer group.
   *
   * A new immutable map is created per call so that streams created with
   * different group ids never race on shared mutable state (the previous
   * implementation mutated a single shared `mutable.Map` on every call).
   *
   * NOTE(review): "enable.auto.commit" is kept `true` to preserve the original
   * behavior, but when offsets are managed externally (see the overload that
   * takes an offsets map) auto-commit can conflict with the manual offset
   * store — confirm whether it should be `false` for that path.
   *
   * @param groupId consumer group id
   * @return consumer configuration for a direct Kafka stream
   */
  private def consumerParams(groupId: String): Map[String, Object] = Map(
    "bootstrap.servers" -> PropertiesUtil("kafka.broker.list"),
    "key.deserializer" -> classOf[StringDeserializer],
    "value.deserializer" -> classOf[StringDeserializer],
    "group.id" -> groupId,
    "auto.offset.reset" -> "latest",
    "enable.auto.commit" -> (true: java.lang.Boolean)
  )

  /**
   * Creates a direct Kafka DStream that consumes from the default offset
   * position ("auto.offset.reset" = "latest" for a group with no committed
   * offsets).
   *
   * @param topic   topic to subscribe to
   * @param ssc     streaming context to attach the stream to
   * @param groupId consumer group id
   * @return input stream of String/String consumer records
   */
  def getKafkaStream(topic: String,
                     ssc: StreamingContext,
                     groupId: String): InputDStream[ConsumerRecord[String, String]] =
    KafkaUtils.createDirectStream(
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](Array(topic), consumerParams(groupId))
    )

  /**
   * Creates a direct Kafka DStream that starts consuming from explicitly
   * supplied offsets (e.g. offsets restored from an external store).
   *
   * @param topic   topic to subscribe to
   * @param ssc     streaming context to attach the stream to
   * @param offsets starting offset per topic-partition
   * @param groupId consumer group id
   * @return input stream of String/String consumer records
   */
  def getKafkaStream(topic: String,
                     ssc: StreamingContext,
                     offsets: Map[TopicPartition, Long],
                     groupId: String): InputDStream[ConsumerRecord[String, String]] =
    KafkaUtils.createDirectStream(
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](Array(topic), consumerParams(groupId), offsets)
    )

  /**
   * Creates a String/String Kafka producer.
   *
   * @return a new producer instance; the caller owns its lifecycle
   */
  def createKafkaProducer(): KafkaProducer[String, String] = {
    val properties = new Properties()
    properties.put("bootstrap.servers", PropertiesUtil("kafka.broker.list"))
    properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    // Idempotence avoids duplicate records on internal producer retries.
    properties.put("enable.idempotence", "true")
    new KafkaProducer[String, String](properties)
  }

  // Shared producer used by the `send` helpers below. It is never
  // reassigned, so it is a `val` (was a `var` for no reason).
  private val producer: KafkaProducer[String, String] = createKafkaProducer()

  /**
   * Sends a message without a key; the partition is chosen by the
   * producer's partitioner.
   */
  def send(topic: String, msg: String): Unit =
    producer.send(new ProducerRecord[String, String](topic, msg))

  /**
   * Sends a keyed message; the key determines the target partition.
   */
  def send(topic: String, key: String, msg: String): Unit =
    producer.send(new ProducerRecord[String, String](topic, key, msg))

  /**
   * Flushes any records buffered in the shared producer.
   */
  def flush(): Unit =
    if (producer != null) producer.flush()

  /**
   * Closes the shared producer; `send` must not be called afterwards.
   */
  def close(): Unit =
    if (producer != null) producer.close()

}
