package com.atbeijing.bigdata.spark.streaming

import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.ReceiverInputDStream
import org.apache.spark.streaming.{Seconds, StreamingContext}

object SparkStreaming13_Stop {

    /**
     * Demonstrates stopping a StreamingContext gracefully from a separate
     * monitoring thread while the main thread blocks in awaitTermination().
     *
     * Requires a socket text source on localhost:9999 (e.g. `nc -lk 9999`).
     */
    def main(args: Array[String]): Unit = {

        // TODO Create the environment: local master, 3-second batch interval.
        val sparkConf = new SparkConf().setMaster("local[*]").setAppName("SparkStreaming")
        val ssc = new StreamingContext(sparkConf, Seconds(3))

        // Word count over a socket text stream.
        val socketDS: ReceiverInputDStream[String] = ssc.socketTextStream("localhost", 9999)
        val wordsDS = socketDS.flatMap(_.split(" "))
        val wordToOneDS = wordsDS.map((_, 1)).reduceByKey(_ + _)
        // Alternative: incremental windowed aggregation (kept for reference).
//        val windowDS = wordToOneDS.reduceByKeyAndWindow(
//            (x, y) => {
//                println(x + "+" + y)
//                x + y
//            },
//            (x, y) => {
//                println(x + "-" + y)
//                x - y
//            },
//            Seconds(9),
//            Seconds(3)
//        )
        wordToOneDS.print()

        // Cannot call stop() here: the context has not been started yet.
        ssc.start()
        // Cannot call stop() on the main thread right after start() either:
        // the driver would shut down before any batch runs. Stop must come
        // from another thread.
        //var flg = false
        new Thread(new Runnable {
            override def run(): Unit = {

                // In a real job the stop signal would be polled from external
                // state rather than a fixed sleep, e.g.:
                // MySQL => table   => stopSpark => 1
                // ZK    => node    => /stopSpark
                // Redis => key/val => stopSpark -> 0
                // HDFS  => path    => /stopSpark
//                while ( true ) {
//                    Thread.sleep(5000)
//                    if ( flg ) {
//                        ssc.stop()
//                    }
//                }
                Thread.sleep(3000)

                // Graceful shutdown: also stop the underlying SparkContext,
                // and let already-received data finish processing first.
                ssc.stop(stopSparkContext = true, stopGracefully = true)
            }
        }).start()

        ssc.awaitTermination()
        // Legal to write, but never reached: awaitTermination() only returns
        // once the context is stopped, after which main simply exits.
        //ssc.stop()
    }
}
