package streaming

import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
  * Filter blacklisted users out of a stream.
  *
  * Log lines ==> DStream
  * 20181010,zs
  * 20181011,ls
  * 20191010,ww
  *     ==> (zs: 20181010,zs)(ls: 20181011,ls)(ww: 20191010,ww)
  *
  * Blacklist ==> RDD (zs, true)(ls, true)
  * zs
  * ls
  *     ==> (zs: true)(ls: true)
  *
  * Result ==> 20191010,ww
  *
  * leftOuterJoin                   filter
  * (zs: [<20181010,zs>, true])   ==>  x
  * (ls: [<20181011,ls>, true])   ==>  x
  * (ww: [<20191010,ww>, None])   ==>  √
  *
  */
object TransformApp {

  /**
    * Reads comma-separated "date,name" records from a local socket and drops
    * every record whose name appears in the blacklist. The per-batch RDD is
    * joined against the static blacklist RDD via `transform` + `leftOuterJoin`.
    */
  def main(args: Array[String]): Unit = {
    // `conf`, not `sc`: by convention `sc` denotes a SparkContext.
    val conf = new SparkConf().setMaster("local[*]").setAppName("TransformApp")
    val ssc = new StreamingContext(conf, Seconds(5))

    // Blacklist as (name, true) pairs so it can be joined against the stream.
    val blacks = List("ls", "zs")
    val blackRDD = ssc.sparkContext.parallelize(blacks).map(name => (name, true))

    val lines = ssc.socketTextStream("localhost", 9999)

    val result = lines
      .map(line => (line.split(","), line))
      // Guard against malformed/empty lines: without this, split(",")(1)
      // throws ArrayIndexOutOfBoundsException and kills the streaming job.
      .filter { case (fields, _) => fields.length >= 2 }
      // Key each record by the name field, keep the original line as value.
      .map { case (fields, line) => (fields(1), line) }
      .transform { rdd =>
        // leftOuterJoin yields (name, (line, Option[Boolean])):
        // Some(true) => blacklisted, None => keep.
        rdd.leftOuterJoin(blackRDD)
          .filter { case (_, (_, flag)) => !flag.getOrElse(false) }
          .map { case (_, (line, _)) => line }
      }

    result.print()

    ssc.start()
    ssc.awaitTermination()
  }

}
