package com.study.util

import java.util

import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}

import scala.collection.mutable

/**
 * Kafka utilities: direct-stream consumption for Spark Streaming plus a shared
 * String/String producer.
 *
 * @author LiuQun
 * @since 2022/5/14 16:42
 */
object MyKafkaUtils {

  /**
   * Base consumer configuration (keys from [[ConsumerConfig]]).
   *
   * Kept immutable on purpose: [[getKafkaDStream]] builds a per-call copy with
   * the group id added, so concurrent calls with different group ids cannot
   * clobber each other (the original shared mutable map raced on
   * `GROUP_ID_CONFIG`).
   */
  private val consumerConfigs: Map[String, Object] = Map[String, Object](
    // Kafka cluster location
    ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> MyPropsUtils(MyConfig.KAFKA_BOOT_SERVER),
    // key/value deserializers
    ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> MyPropsUtils(MyConfig.KAFKA_KEY_DES),
    ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> MyPropsUtils(MyConfig.KAFKA_VALUE_DES),
    // offset commit: auto-commit every 5s.
    // NOTE(review): auto-commit under Spark Streaming can commit offsets for
    // batches that later fail (at-most-once); consider manual commitAsync of
    // the consumed OffsetRanges if delivery guarantees matter here.
    ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG -> "true",
    ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG -> "5000",
    // where to start when no committed offset exists: "latest" or "earliest"
    ConsumerConfig.AUTO_OFFSET_RESET_CONFIG -> "latest"
  )

  /**
   * Creates a direct Kafka DStream for Spark Streaming, starting from the
   * group's committed offsets (or `auto.offset.reset` when none exist).
   *
   * @param ssc     the active [[StreamingContext]]
   * @param topic   topic to subscribe to
   * @param groupId consumer group id for this stream
   * @return a direct input stream of String/String consumer records
   */
  def getKafkaDStream(ssc: StreamingContext,
                      topic: String,
                      groupId: String): InputDStream[ConsumerRecord[String, String]] = {
    // Per-call config: add the group id to a copy of the base map instead of
    // mutating shared state.
    val configs: Map[String, Object] =
      consumerConfigs + (ConsumerConfig.GROUP_ID_CONFIG -> groupId)

    KafkaUtils.createDirectStream(
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](Array(topic), configs)
    )
  }

  /**
   * Shared producer instance.
   *
   * Lazily initialized so that merely loading this object from a
   * consumer-only job does not open a producer connection to the brokers.
   */
  lazy val producer: KafkaProducer[String, String] = createProducer()

  /**
   * Builds a String/String [[KafkaProducer]] from the configured brokers.
   *
   * @return a new producer; callers normally use the shared [[producer]]
   */
  def createProducer(): KafkaProducer[String, String] = {
    val producerConfigs: util.HashMap[String, AnyRef] = new util.HashMap[String, AnyRef]
    // Producer configuration keys come from ProducerConfig.

    // Kafka cluster location
    producerConfigs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, MyPropsUtils(MyConfig.KAFKA_BOOT_SERVER))

    // BUG FIX: the original wired the *deserializer* properties
    // (MyConfig.KAFKA_KEY_DES / KAFKA_VALUE_DES) into the serializer slots;
    // a KafkaProducer[String, String] configured with a deserializer class
    // fails at construction time. Use StringSerializer explicitly.
    // (Assumes KAFKA_KEY_DES/KAFKA_VALUE_DES name deserializer classes, as
    // their names indicate — confirm against the properties file.)
    producerConfigs.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
      "org.apache.kafka.common.serialization.StringSerializer")
    producerConfigs.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
      "org.apache.kafka.common.serialization.StringSerializer")

    // acks=all: wait for all in-sync replicas before acknowledging
    producerConfigs.put(ProducerConfig.ACKS_CONFIG, "all")
    // batch.size 16kb / linger.ms 0 / retries — broker defaults kept

    // idempotent producer: broker-side dedup of retried sends
    producerConfigs.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true")

    new KafkaProducer[String, String](producerConfigs)
  }

  /**
   * Sends `msg` to `topic` using the default (sticky) partitioner.
   */
  def send(topic: String, msg: String): Unit =
    producer.send(new ProducerRecord[String, String](topic, msg))

  /**
   * Sends `msg` to `topic`, choosing the partition by hashing `key`.
   */
  def send(topic: String, key: String, msg: String): Unit =
    producer.send(new ProducerRecord[String, String](topic, key, msg))

  /**
   * Flushes and closes the shared producer, releasing its broker connections.
   */
  def close(): Unit =
    if (producer != null) producer.close()
}
