package com.learn.lb.spark.streaming

import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Kafka as the streaming data source (receiver-based approach).
 *
 * @author laibo
 * @since 2019/9/4 16:08
 */
object KafkaReceiverSourceDemo {

  /**
   * Entry point: consumes comma-separated messages from Kafka (receiver-based API),
   * performs a word count per 10-second batch, and prints the result.
   *
   * The Kafka receiver occupies one thread permanently, so the master must be
   * `local[n]` with n > 1 — otherwise no thread is left to process batches.
   */
  def main(args: Array[String]): Unit = {
    // appName fixed to match this demo (was "WorkCountFileStreaming", copied from another example).
    val sparkConf = new SparkConf().setMaster("local[2]").setAppName("KafkaReceiverSourceDemo")
    val ssc = new StreamingContext(sparkConf, Seconds(10))

    // Topic name -> number of receiver threads for that topic.
    // NOTE(review): "SparkStreamingGroup" reads like a consumer-group id, and "123" below
    // like a topic name — they may be swapped; verify against the actual Kafka setup.
    val topicThreads = Map("SparkStreamingGroup" -> 1)

    // createStream(ssc, zkQuorum, groupId, topics): ZooKeeper-based receiver source.
    val receiverDStream = KafkaUtils.createStream(ssc, "master:2181", "123", topicThreads)

    // Records are (key, message) pairs; count words in the comma-separated message payload.
    val wordCounts = receiverDStream
      .map(_._2)
      .flatMap(_.split(","))
      .map((_, 1))
      .reduceByKey(_ + _)
    wordCounts.print()

    ssc.start()
    ssc.awaitTermination() // blocks until the streaming job is stopped or fails
  }
}
