package com.spark.streaming

import org.apache.spark.SparkConf
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.dstream.ReceiverInputDStream
import org.apache.spark.streaming.kafka.KafkaUtils

object KafkaTest1 {

  /**
   * Receiver-based Kafka word count (legacy `KafkaUtils.createStream` API).
   *
   * Connects to ZooKeeper at localhost:2181 as consumer group "g1", reads the
   * "test" topic with one receiver thread, and — every 5-second batch — splits
   * the message values into whitespace-separated words, sums the count per
   * word within the batch, and prints both the number of distinct words and
   * the (word, count) pairs to stdout. Runs until externally terminated.
   */
  def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("KafkaTest1")
    val streamingContext: StreamingContext = new StreamingContext(sparkConf, Seconds(5))

    val zkQuorum = "localhost:2181"
    val consumerGroup = "g1" // test-consumer-group
    // topic name -> number of receiver threads for that topic
    val topicThreads = Map[String, Int]("test" -> 1)

    // Each Kafka record arrives as a (key, value) pair.
    val kafkaStream: ReceiverInputDStream[(String, String)] =
      KafkaUtils.createStream(streamingContext, zkQuorum, consumerGroup, topicThreads)

    // Keep only the message value.
    val lines: DStream[String] = kafkaStream.map(_._2)
    // Split each line on spaces and flatten into individual words.
    val words: DStream[String] = lines.flatMap(_.split(" "))
    // Pair every word with an initial count of 1.
    val wordPairs: DStream[(String, Int)] = words.map((_, 1))
    // Sum the counts per word within the current batch.
    val wordCounts: DStream[(String, Int)] = wordPairs.reduceByKey(_ + _)

    // Print the number of distinct words in the batch, then the counts themselves.
    wordCounts.count().print()
    wordCounts.print()

    streamingContext.start()
    streamingContext.awaitTermination()
  }
}