package com.sjc.sparkstreaming.lesson01

import org.apache.log4j.{Level, Logger}
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Real-time word count over a socket text stream.
 *
 * Reads comma-separated words from localhost:9999 and prints the
 * per-micro-batch counts every 2 seconds. Run `nc -lk 9999` first
 * to provide input.
 */
object WordCount {
  def main(args: Array[String]): Unit = {
    // Silence Spark's verbose INFO logging so the batch output is readable.
    Logger.getLogger("org").setLevel(Level.ERROR)

    // Step 1: initialize the program entry point.
    // "local[2]" is the minimum for a receiver-based source: one thread is
    // permanently occupied by the socket receiver, the other processes batches.
    val conf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("wordcount...")

    // Micro-batch interval: one batch every 2 seconds.
    val ssc = new StreamingContext(conf, Seconds(2))

    // Step 2: acquire data from the source (data input).
    val myDataDStream: ReceiverInputDStream[String] = ssc.socketTextStream("localhost", 9999)

    // Step 3: apply operators implementing the business logic (data processing).
    val lineDStream: DStream[String] = myDataDStream.flatMap(_.split(","))
    val wordAndOneDStream: DStream[(String, Int)] = lineDStream.map((_, 1))
    val resultDStream: DStream[(String, Int)] = wordAndOneDStream.reduceByKey(_ + _)

    // Step 4: emit the results (data output).
    resultDStream.print()

    // Step 5: start the streaming job; transformations above are lazy until now.
    ssc.start()

    // Step 6: block the driver until the job is stopped or fails.
    ssc.awaitTermination()
  }
}
