package com.zxq.mall.realtime.util


import java.util
import java.util.Properties

import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.TopicPartition
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}

import scala.collection.mutable


/**
  * Kafka utility object: creates Spark Streaming direct streams that consume
  * from Kafka, and exposes a shared producer for writing messages back out.
  */
object MyKafkaUtils {

  /**
    * Base consumer configuration shared by both stream factories.
    *
    * Kept immutable on purpose: each `getKafkaDStream` call derives its own
    * map with the caller's groupId, so concurrent callers no longer race on a
    * single shared mutable config (the previous version mutated one
    * `mutable.Map` from both overloads).
    */
  private val consumerConfig: Map[String, String] = Map(

    // Kafka cluster location, resolved from the external properties file.
    ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> PropertiesUtils("kafka.broker.list"),

    // Key/value deserializers: records are consumed as plain strings.
    ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> "org.apache.kafka.common.serialization.StringDeserializer",
    ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> "org.apache.kafka.common.serialization.StringDeserializer",

    // Default consumer group; always overridden per call in getKafkaDStream.
    ConsumerConfig.GROUP_ID_CONFIG -> "mall",

    // NOTE(review): auto-commit "true" together with the manual-offset
    // overload below means Kafka may commit offsets on its own while the
    // application also tracks them externally. If offsets are committed
    // manually downstream, this should be "false" — confirm before changing,
    // as it alters restart semantics for existing groups.
    ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG -> "true",

    // Where to start when the group has no committed offset.
    ConsumerConfig.AUTO_OFFSET_RESET_CONFIG -> "latest"
  )

  /**
    * Creates a Kafka DStream starting from the offsets Kafka already has for
    * this group (or `auto.offset.reset` when none exist).
    *
    * @param ssc     active StreamingContext
    * @param topic   topic to subscribe to
    * @param groupId consumer group id, supplied per call
    */
  def getKafkaDStream(ssc: StreamingContext, topic: String, groupId: String) = {

    // Per-call copy of the base config carrying this caller's group id.
    val params = consumerConfig + (ConsumerConfig.GROUP_ID_CONFIG -> groupId)

    val dStream = KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](Array(topic), params))
    dStream
  }

  /**
    * Creates a Kafka DStream that begins consuming from explicitly supplied
    * offsets (e.g. offsets restored from external storage).
    *
    * @param topic   topic to subscribe to
    * @param ssc     active StreamingContext
    * @param offsets starting offset for each topic-partition
    * @param groupId consumer group id, supplied per call
    */
  def getKafkaDStream(topic: String, ssc: StreamingContext, offsets: Map[TopicPartition, Long], groupId: String) = {

    // Per-call copy of the base config carrying this caller's group id.
    val params = consumerConfig + (ConsumerConfig.GROUP_ID_CONFIG -> groupId)

    val dStream = KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](Array(topic), params, offsets)
    )
    dStream
  }

  /**
    * Shared producer instance, created once at object initialization.
    * `val` rather than `var`: it is never reassigned.
    */
  private val producer: KafkaProducer[String, String] = createProducer()

  /**
    * Builds the shared string/string KafkaProducer from external properties.
    */
  def createProducer(): KafkaProducer[String, String] = {

    // Producer configuration.
    val props = new Properties()

    // Kafka cluster location.
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, PropertiesUtils("kafka.broker.list"))

    // Key/value serializers: records are produced as plain strings.
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")

    // Idempotent producer: broker-side retries do not create duplicates.
    props.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true")

    new KafkaProducer[String, String](props)
  }

  /**
    * Sends a message without a key.
    */
  def send(topic: String, msg: String): Unit = {
    producer.send(new ProducerRecord[String, String](topic, msg))
  }

  /**
    * Sends a message with a key, so records sharing a key map to the same
    * partition. Returns the producer's send future (inferred, as before).
    */
  def send(topic: String, key: String, msg: String) = {
    producer.send(new ProducerRecord[String, String](topic, key, msg))
  }

  /**
    * Flushes the producer's in-memory buffer to the brokers.
    */
  def flush(): Unit = {
    if (producer != null) producer.flush()
  }

  /**
    * Closes the shared producer. Subsequent `send` calls will fail.
    */
  def close(): Unit = {
    if (producer != null)
      producer.close()
  }
}
