package com.streaming.demo

import org.apache.spark.{HashPartitioner, SparkConf}
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.{Seconds, StreamingContext, Time}

/**
  * @author td
  * @date 2018/4/2
  */
object SparkStreamingDemo {

  /**
    * Entry point: socket-based streaming word count over 10-second batches.
    *
    * Optional args (all defaults preserve the original hard-coded behavior):
    *   args(0) — source host   (default "localhost")
    *   args(1) — source port   (default 9999)
    *   args(2) — output prefix (default "hdfs://master:8020/user/streaming/result")
    */
  def main(args: Array[String]): Unit = {

    val host         = if (args.length > 0) args(0) else "localhost"
    val port         = if (args.length > 1) args(1).toInt else 9999
    val outputPrefix = if (args.length > 2) args(2) else "hdfs://master:8020/user/streaming/result"

    // local[2]: one thread for the socket receiver, one for batch processing.
    // A single thread would starve processing entirely.
    val conf = new SparkConf().setMaster("local[2]").setAppName("sparkDemo")

    // Streaming context with 10-second micro-batches.
    val ssc = new StreamingContext(conf, Seconds(10))

    // Read newline-delimited text from the TCP source.
    val lines = ssc.socketTextStream(host, port)

    // Split lines into words and count occurrences within each batch.
    val wordCounts = lines.flatMap(_.split(" ")).map((_, 1)).reduceByKey(_ + _)

    // BUG FIX: the original called rdd.saveAsTextFile(...) with a FIXED path
    // inside foreachRDD; Hadoop refuses to write into an existing directory,
    // so every batch after the first one failed. saveAsTextFiles appends the
    // batch time to the prefix, giving each batch its own output directory.
    wordCounts.saveAsTextFiles(outputPrefix)

    // Print the first elements of each batch to the driver log for debugging.
    wordCounts.print()

    ssc.start()
    ssc.awaitTermination()
  }
}
