package com.xinqing.bigdata.streaming

import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
  * @Author CHQ
  * @Date 2020/6/28 14:58
  * @Description Spark Streaming word count over messages consumed from a Kafka topic.
  */
object Kafka {

  /**
    * Entry point: runs a Spark Streaming word count over a Kafka topic.
    *
    * Connection settings may be supplied as program arguments; each falls
    * back to the previous hard-coded value, so running with no arguments
    * behaves exactly as before:
    *
    *   - args(0): ZooKeeper quorum   (default "10.201.7.176:2181")
    *   - args(1): consumer group id  (default "consumer-group")
    *   - args(2): Kafka topic name   (default "test1")
    *
    * Blocks until the streaming context is terminated.
    */
  def main(args: Array[String]): Unit = {

    // Resolve configuration from args, keeping the original hard-coded
    // values as defaults for backward compatibility.
    val zkQuorum: String = args.lift(0).getOrElse("10.201.7.176:2181")
    val groupId: String = args.lift(1).getOrElse("consumer-group")
    val topic: String = args.lift(2).getOrElse("test1")

    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("spark streaming wc")

    // 3-second micro-batch interval.
    val streamingContext: StreamingContext = new StreamingContext(conf, Seconds(3))

    // Consume (key, message) pairs from Kafka, using 3 receiver threads
    // for the topic.
    // NOTE(review): KafkaUtils.createStream is the legacy receiver-based
    // 0.8 API — consider migrating to createDirectStream; verify against
    // the Kafka/Spark versions this project targets.
    val kafkaInputStream: ReceiverInputDStream[(String, String)] = KafkaUtils.createStream(streamingContext,
      zkQuorum, groupId, Map(topic -> 3))

    // Word count: split each message payload on commas and count
    // occurrences within each micro-batch.
    val resStream: DStream[(String, Int)] = kafkaInputStream.flatMap(_._2.split(",")).map((_, 1)).reduceByKey(_ + _)

    // Print the first results of each batch to stdout.
    resStream.print()

    streamingContext.start()
    streamingContext.awaitTermination()
  }
}
