import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.codehaus.jackson.map.deser.std.StringDeserializer

/**
  * @author dinghao 
  * @create 2021-08-19 10:23 
  * @message OK
  */
object KafkaSource {
  /**
    * Entry point: starts a Spark Streaming job that subscribes to a Kafka
    * topic (connection details read from `ConnectMessage`), splits each
    * record's value on spaces, and prints a word count for every 4-second
    * batch until terminated.
    */
  def main(args: Array[String]): Unit = {
    // BUG FIX: the original called setMaster twice — "KafkaSource" is an
    // application name, not a master URL; the first call must be setAppName
    // (SparkContext creation fails without an app name).
    val conf = new SparkConf().setAppName("KafkaSource").setMaster("local[*]")
    val ssc = new StreamingContext(conf, Seconds(4))

    // Kafka consumer configuration.
    // BUG FIX: the deserializers must be Kafka's
    // org.apache.kafka.common.serialization.StringDeserializer; the imported
    // Jackson StringDeserializer (org.codehaus.jackson...) does not implement
    // Kafka's Deserializer interface and would fail at runtime. Fully
    // qualified here to avoid a clash with the existing (unused) import.
    // BUG FIX: group.id is mandatory for ConsumerStrategies.Subscribe —
    // re-enabled the previously commented-out entry.
    val kafkaMap: Map[String, Object] = Map(
      ConsumerConfig.GROUP_ID_CONFIG -> ConnectMessage.get("kafka.GROUP_ID_CONFIG"),
      ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> ConnectMessage.get("kafka.bootstrap.servers"),
      ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG ->
        classOf[org.apache.kafka.common.serialization.StringDeserializer],
      ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG ->
        classOf[org.apache.kafka.common.serialization.StringDeserializer]
    )

    // Direct stream (no receiver); PreferConsistent spreads partitions
    // evenly across available executors.
    val kafkaData = KafkaUtils.createDirectStream(
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](Set(ConnectMessage.get("kafka.topic")), kafkaMap)
    )

    // Classic word count over each batch, printed to stdout.
    kafkaData
      .map(_.value)
      .flatMap(_.split(" "))
      .map((_, 1))
      .reduceByKey(_ + _)
      .print()

    ssc.start()
    ssc.awaitTermination() // blocks until the streaming context is stopped
  }
}
