package com.cike.sparkstudy.sparkstreaming.scala

import org.apache.spark.SparkConf
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext

/**
 * Spark Streaming example: filter ad-click logs against a blacklist using
 * `DStream.transform` to join each micro-batch RDD with a static pair RDD.
 *
 * Expected log line format on the socket: "<timestamp> <user>" — the user
 * name is extracted as the join key.
 */
object TransformBlacklist {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("TransformBlacklist").setMaster("local[2]")
    val ssc = new StreamingContext(conf, Seconds(5))

    // 1. Mock the blacklist data.
    // Must be an Array of (user, flag) tuples so parallelize yields a pair
    // RDD usable with leftOuterJoin. The previous Array("tom", true) inferred
    // Array[Any], which cannot be joined (and forced the join code below to
    // stay commented out).
    val blacklist = Array(("tom", true))
    val blacklistRDD = ssc.sparkContext.parallelize(blacklist)

    // 2. Open the monitored data source (raw text over a socket).
    val adsClickLogDStream = ssc.socketTextStream("bigdata", 9999)

    // 3. Split each log line into (user, fullLogLine) so the user name
    // becomes the join key.
    val userAdsClickLogDStream = adsClickLogDStream
      .map { adsClickLog => (adsClickLog.split(" ")(1), adsClickLog) }

    // 4. Per batch: left-outer-join against the blacklist, drop any record
    // whose user is flagged (flag present and true), and keep the original
    // log line.
    val validAdsClickLogDStream = userAdsClickLogDStream.transform { userAdsClickLogRDD =>
      userAdsClickLogRDD
        .leftOuterJoin(blacklistRDD)
        // keep the record unless the blacklist flag is present and true
        .filter { case (_, (_, flagOpt)) => !flagOpt.getOrElse(false) }
        .map { case (_, (log, _)) => log }
    }

    validAdsClickLogDStream.print()

    ssc.start()
    ssc.awaitTermination()
  }
}