package com.atguigu.spark

import org.apache.kafka.clients.consumer
import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka010.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}

import java.util.Properties

object SparkKafkaConsumer {

    /**
     * Entry point: consumes records from the Kafka topic "first" via a Spark
     * Streaming direct stream and prints each record's value every 3 seconds,
     * then blocks until the streaming job is terminated.
     */
    def main(args: Array[String]): Unit = {

        // 1. Initialise the streaming context with a 3-second micro-batch interval.
        val sparkConf = new SparkConf()
            .setMaster("local[*]")
            .setAppName("spark-kafka")
        val streamingContext = new StreamingContext(conf = sparkConf, batchDuration = Seconds(3))

        // 2. Kafka consumer configuration: broker address, key/value
        //    deserializers, and the consumer group id.
        val consumerParams: Map[String, Object] = Map(
            ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> "192.168.1.12:9092",
            ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer],
            ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer],
            ConsumerConfig.GROUP_ID_CONFIG -> "test"
        )

        // Create a direct stream subscribed to the "first" topic;
        // PreferConsistent distributes partitions evenly across executors.
        val recordStream = KafkaUtils.createDirectStream(
            streamingContext,
            LocationStrategies.PreferConsistent,
            ConsumerStrategies.Subscribe[String, String](Set("first"), consumerParams)
        )

        // Extract the record payloads and print each batch to stdout.
        recordStream.map(_.value).print

        // 3. Start the computation and block the main thread until termination.
        streamingContext.start()
        streamingContext.awaitTermination()
    }

}
