package org.niit.spark.connUil

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils}
import java.util.{HashMap, Properties}
import java.util

class kafkaConn {

  /**
   * Creates a direct Kafka input DStream subscribed to the "supermarket" topic.
   *
   * Offsets are managed by the stream itself (auto-commit disabled, reset to
   * "earliest" when the group "niit3" has no committed offset) and recorded via
   * Spark checkpointing under ./checkpoint.
   *
   * @param streamingCon the active StreamingContext to attach the stream to
   * @return the direct stream of ConsumerRecord[String, String]
   */
  def util(streamingCon: StreamingContext) = {
    val topic = "supermarket"
    val group = "niit3"

    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "niit202134070927:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> group,
      // Start from the beginning of the topic when no committed offset exists.
      "auto.offset.reset" -> "earliest",
      // Offsets are tracked through checkpointing, not Kafka auto-commit.
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    // Record metadata and consumed-offset positions for recovery.
    streamingCon.checkpoint("./checkpoint")

    val topics = Array(topic)

    // Connect to Kafka with the configuration above.
    KafkaUtils.createDirectStream(
      streamingCon,
      PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](topics, kafkaParams)
    )
  }

  /**
   * Publishes a single string message to the given Kafka topic.
   *
   * Note: a new producer is built per call, which is expensive; callers on a
   * hot path should reuse a long-lived producer instead.
   *
   * @param topic destination Kafka topic
   * @param str   message payload (sent with a null key)
   */
  def kafkaconnect(topic: String, str: String): Unit = {

    val props = new HashMap[String, Object]()

    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "niit202134070927:9092")
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")

    // Build the producer.
    val producer = new KafkaProducer[String, String](props)

    try {
      producer.send(new ProducerRecord[String, String](topic, str))
    } finally {
      // close() flushes any buffered/in-flight sends, then releases the
      // producer's I/O thread, sockets, and buffer memory. Without this the
      // producer leaks on every call and the async send may never be delivered.
      producer.close()
    }

  }


}
