package com.xuejiujiu.gmall.realtine.util

import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}

import java.util
import scala.collection.mutable

/**
 * Kafka工具类， 用于生产数据和消费数据
 */
/**
 * Kafka utility object for producing and consuming messages.
 *
 * Consumption is exposed as a Spark Streaming direct DStream; production goes
 * through a single shared [[KafkaProducer]] instance.
 */
object MyKafkaUtiles {

  /**
   * Base consumer configuration: cluster location, deserializers, offset policy.
   *
   * Kept as an immutable Map on purpose: the group.id is merged in per call
   * inside [[getKafkaDStream]], so creating several streams with different
   * group ids no longer mutates (and races on) shared object-level state.
   */
  private val consumerConfigs: Map[String, Object] = Map(
    // Kafka cluster location, read from config.properties on the classpath
    ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> MyPropsUtils(MyConfig.KAFKA_BOOTSTRAP_SERVERS),
    // Key/value deserializers
    ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> "org.apache.kafka.common.serialization.StringDeserializer",
    ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> "org.apache.kafka.common.serialization.StringDeserializer",
    // Offset commit: automatic ("true") vs manual ("false").
    // NOTE(review): auto-commit with Spark Streaming can lose or duplicate data
    // on failure; switch to "false" plus manual commitAsync if at-least-once /
    // exactly-once semantics matter — confirm against downstream requirements.
    ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG -> "true",
    // Where to start when no committed offset exists: "latest" or "earliest"
    ConsumerConfig.AUTO_OFFSET_RESET_CONFIG -> "latest"
  )

  /**
   * Creates a direct Kafka DStream subscribed to a single topic.
   *
   * @param ssc     active StreamingContext to attach the stream to
   * @param topic   topic to subscribe to
   * @param groupId consumer group id for this stream
   * @return a DStream of ConsumerRecord[String, String]
   */
  def getKafkaDStream(ssc: StreamingContext, topic: String, groupId: String) = {
    // Per-call copy of the base config with the group id merged in; avoids
    // mutating shared state when multiple streams use different group ids.
    val kafkaParams = consumerConfigs + (ConsumerConfig.GROUP_ID_CONFIG -> groupId)
    KafkaUtils.createDirectStream(
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](Array(topic), kafkaParams))
  }

  /**
   * Shared producer instance.
   *
   * Lazy so that consumer-only callers do not open a producer connection (nor
   * fail on producer misconfiguration) at object-initialization time.
   */
  lazy val producer: KafkaProducer[String, String] = createProducer()

  /**
   * Builds a String/String KafkaProducer with acks=all and idempotence enabled.
   *
   * @return a new, connected producer instance
   */
  def createProducer(): KafkaProducer[String, String] = {
    val producerConfigs = new util.HashMap[String, AnyRef]
    // Kafka cluster location, read from config.properties on the classpath
    producerConfigs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, MyPropsUtils(MyConfig.KAFKA_BOOTSTRAP_SERVERS))
    // Key/value serializers
    producerConfigs.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
    producerConfigs.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
    // acks=all: wait for the full in-sync replica set before acknowledging
    producerConfigs.put(ProducerConfig.ACKS_CONFIG, "all")
    // batch.size (default 16KB), linger.ms (default 0), retries: left at defaults
    // Idempotent producer: broker-side de-duplication on retried sends
    producerConfigs.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true")
    new KafkaProducer[String, String](producerConfigs)
  }

  /**
   * Sends a message with no key (partition chosen by the default
   * sticky-partitioning strategy).
   */
  def send(topic: String, msg: String): Unit =
    producer.send(new ProducerRecord[String, String](topic, msg))

  /**
   * Sends a keyed message (partition chosen from the key).
   */
  def send(topic: String, key: String, msg: String): Unit =
    producer.send(new ProducerRecord[String, String](topic, key, msg))

  /**
   * Closes the shared producer, flushing any buffered records.
   */
  def close(): Unit =
    if (producer != null) producer.close()
}
