package cn.itcast.dstream

import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.immutable

/**
 * Receiver-based Spark Streaming word count over a Kafka topic.
 *
 * Starts three parallel Kafka receivers via the legacy `KafkaUtils.createStream`
 * API, unions their streams, and prints per-batch word counts every 5 seconds.
 * The receiver write-ahead log (WAL) is enabled so received data survives
 * driver/receiver failure.
 */
object SparkStreaming_Kafka_createDstream {
    def main(args: Array[String]): Unit = {
        // 1. Build SparkConf; enable the receiver WAL so every received batch is
        //    persisted to the checkpoint directory before being processed.
        val sparkConf: SparkConf = new SparkConf()
            .setAppName("SparkStreaming_Kafka_createDstream")
            .setMaster("local[4]")
            .set("spark.streaming.receiver.writeAheadLog.enable", "true")
        // 2. Create the SparkContext and reduce log noise.
        val sc = new SparkContext(sparkConf)
        sc.setLogLevel("WARN")
        // 3. Create the StreamingContext with a 5-second batch interval.
        val ssc = new StreamingContext(sc, Seconds(5))
        // 4. Checkpoint directory — also where the WAL segments are written.
        ssc.checkpoint("./Kafka_Receiver")
        // 5. ZooKeeper quorum used by the high-level Kafka consumer.
        val zkQuorum = "hadoop01:2181,hadoop02:2181,hadoop03:2181"
        // 6. Kafka consumer group id.
        val groupId = "spark_receiver"
        // 7. topic -> thread count. NOTE: the value is NOT the number of topic
        //    partitions; it is how many consumer threads each receiver uses for
        //    the topic.
        val topics = Map("kafka_spark" -> 1)
        // 8. Start 3 receivers in parallel for higher ingest throughput.
        //    With the WAL enabled, use a non-replicated storage level
        //    (MEMORY_AND_DISK_SER): the WAL already provides durability, so the
        //    default _2 replication would store the data twice for no benefit.
        val receiverDstream: immutable.IndexedSeq[ReceiverInputDStream[(String, String)]] =
            (1 to 3).map { _ =>
                KafkaUtils.createStream(ssc, zkQuorum, groupId, topics,
                    StorageLevel.MEMORY_AND_DISK_SER)
            }
        // 9. Union all receiver streams into a single DStream.
        val unionDStream: DStream[(String, String)] = ssc.union(receiverDstream)
        // 10. Keep only the message value (the key is dropped).
        val topicData: DStream[String] = unionDStream.map(_._2)
        // 11. Split each line on spaces and pair every word with a count of 1.
        val wordAndOne: DStream[(String, Int)] = topicData.flatMap(_.split(" ")).map((_, 1))
        // 12. Per-batch word counts (not stateful across batches).
        val result: DStream[(String, Int)] = wordAndOne.reduceByKey(_ + _)
        // 13. Print the first elements of each batch to stdout.
        result.print()
        // 14. Start the streaming computation and block until termination.
        ssc.start()
        ssc.awaitTermination()
    }

}
