package com.example


import org.apache.spark.{SparkContext, SparkConf}
import org.apache.spark.rdd.RDD.rddToOrderedRDDFunctions
import org.apache.spark.rdd.RDD.rddToPairRDDFunctions
import org.apache.spark.streaming._

import org.apache.spark.streaming.dstream.DStream




/**
 * Streaming word count over a TCP socket.
 *
 * Connects to a text socket source, splits each line on single spaces,
 * and prints per-word counts for every 30-second batch.
 *
 * Usage: sockread [host] [port]
 *   host — socket source host (default: 10.240.4.137)
 *   port — socket source port (default: 9999)
 *
 * Runs with a local[4] master; blocks until the streaming context is
 * terminated externally.
 */
object sockread {
  def main(args: Array[String]): Unit = {
    // Hard-coded defaults preserved for backward compatibility; callers
    // may now override the socket endpoint via command-line arguments.
    val host = if (args.length > 0) args(0) else "10.240.4.137"
    val port = if (args.length > 1) args(1).toInt else 9999

    val sparkConf = new SparkConf().setAppName("StreamingTest").setMaster("local[4]")
    // 30-second micro-batch interval.
    val ssc = new StreamingContext(sparkConf, Seconds(30))

    val lines = ssc.socketTextStream(host, port)
    val words = lines.flatMap(_.split(" "))
    val wordCounts = words.map(x => (x, 1)).reduceByKey(_ + _)
    wordCounts.print()

    ssc.start()
    // Block the driver until the streaming job is stopped or fails.
    ssc.awaitTermination()
  }
}