package com.atguigu.stream.test

import com.atguigu.stream.util.MySparkStreamContextUtil
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}

/**
 * description ：Spark Streaming test using netcat (nc) as the data source
 * author      ：剧情再美终是戏 
 * mail        : 13286520398@163.com
 * date        ：Created in 2020/1/13 15:37
 * modified By ：
 * version:    : 1.0
 */
object NcSourceStream {

  /**
   * Entry point: runs a streaming word count over text lines received
   * from a netcat (nc) socket at hadoop101:9999 and prints each batch's
   * counts to stdout. Blocks the driver until the job terminates.
   */
  def main(args: Array[String]): Unit = {

    // Build the StreamingContext via the shared project utility.
    val ssc: StreamingContext = MySparkStreamContextUtil.get(args)

    // Receive raw text lines from the netcat server.
    val lines: ReceiverInputDStream[String] = ssc.socketTextStream("hadoop101", 9999)

    // Classic word count: tokenize on single spaces, pair each word with 1,
    // then sum the counts per word within each micro-batch.
    val wordCounts: DStream[(String, Int)] =
      lines
        .flatMap(line => line.split(" "))
        .map(word => (word, 1))
        .reduceByKey((a, b) => a + b)

    // Emit the first few results of every batch to the driver's stdout.
    wordCounts.print()

    // Start the streaming computation and keep the driver alive until
    // the context is stopped or an error terminates it.
    ssc.start()
    ssc.awaitTermination()
  }

}
