package org.hadoop.spark

import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}

object Streaming {

  /** Entry point for a simple network word-count stream.
    *
    * Connects to a TCP socket serving newline-delimited text and, for each
    * micro-batch, prints a per-batch word count followed by the raw lines.
    *
    * Usage: Streaming [host] [port] [batchSeconds]
    * All arguments are optional; defaults preserve the original hard-coded
    * values (host=192.168.56.102, port=9999, batch interval=2s), so existing
    * zero-argument invocations behave exactly as before.
    */
  def main(args: Array[String]): Unit = {
    // Generalized: take host/port/batch from args, falling back to the
    // previous hard-coded defaults for backward compatibility.
    val host = args.lift(0).getOrElse("192.168.56.102")
    val port = args.lift(1).map(_.toInt).getOrElse(9999)
    val batchSeconds = args.lift(2).map(_.toLong).getOrElse(2L)

    // local[2]: at least two threads are required — one to run the socket
    // receiver, one to process the received batches.
    val conf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("NetworkWordCount")

    // Streaming context with the configured micro-batch interval.
    val ssc = new StreamingContext(conf, Seconds(batchSeconds))

    // DStream of text lines read from the configured host/port.
    val lines = ssc.socketTextStream(host, port)

    // Per-batch word count: split on whitespace, pair each word with 1,
    // sum counts by word, and print the first results of each batch.
    lines.flatMap(_.split("\\s+")).map((_, 1)).reduceByKey(_ + _).print()

    // Also print the raw lines received in each batch.
    lines.print()

    // Start the computation and block until the streaming job is stopped.
    ssc.start()
    ssc.awaitTermination()
  }
}
