package com.dtkavin.sparkstreaming

import java.io.{BufferedReader, InputStreamReader}
import java.net.Socket

import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Duration, Milliseconds, StreamingContext}

/**
  * Created by IntelliJ IDEA.
  * Programmer : John Zn
  * Date : 2016/4/19 0019
  * Time : 19:24
  * Description : Reads streaming data from a TCP socket.
  */
/** Empty placeholder class; all of the application logic lives in the
  * companion object's `main` method. Kept (unused) for source-file symmetry. */
class SocketStreaming {

}

object SocketStreaming {

  /**
    * Entry point: consumes whitespace-delimited text from a TCP socket,
    * computes per-word counts for each 5-second micro-batch, logs each
    * (word, count) pair on the executors, and prints the batch to the driver.
    *
    * Usage: SocketStreaming [host] [port]
    * Defaults ("spark01", 10020) preserve the original hard-coded values,
    * so existing invocations with no arguments behave identically.
    *
    * Blocks forever in `awaitTermination()`; never returns normally.
    */
  def main(args: Array[String]): Unit = {
    // Host/port are now overridable from the command line; defaults keep
    // backward compatibility with the previous hard-coded endpoint.
    val host = if (args.length > 0) args(0) else "spark01"
    val port = if (args.length > 1) args(1).toInt else 10020

    // Project-local helper that raises the log threshold to WARN.
    SelfLogging.getSelfLoggingLevel("WARN")

    val conf = new SparkConf().setAppName("SocketStreaming").setMaster("local[3]")
    val ssc = new StreamingContext(conf, Milliseconds(5000))

    // NOTE(review): Windows-style path — adjust when running on another OS.
    ssc.checkpoint("d:/data/checkpoint")

    val reduced = ssc
      .socketTextStream(host, port)
      .flatMap(_.split(" "))
      .map((_, 1))
      .reduceByKey(_ + _)

    // Side-effecting sink: runs on the driver per batch; the partition
    // iteration below executes on the executors.
    reduced.foreachRDD { rdd =>
      // Prints the RDD's toString (id/lineage), NOT its contents — kept as
      // in the original; the actual elements are logged per-partition below.
      println(rdd)
      rdd.foreachPartition { partition =>
        partition.foreach { case (word, count) =>
          println("dtkavin dealed: " + word + ": " + count)
        }
      }
    }

    reduced.print()
    ssc.start()
    ssc.awaitTermination()
  }
}

