package com.shujia.streeam

import kafka.serializer.StringDecoder
import org.apache.spark.api.java.StorageLevels
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Durations, StreamingContext}

import scala.io.Source

object Demo13BlackListFilter {
  def main(args: Array[String]): Unit = {

    // App name fixed to match this object (was copy-pasted from Demo8SSCOnKafka).
    val conf = new SparkConf().setMaster("local[4]").setAppName("Demo13BlackListFilter")

    val sc = new SparkContext(conf)

    // 5-second micro-batch interval.
    val ssc = new StreamingContext(sc, Durations.seconds(5))

    // Topic to consume -> number of consumer threads for that topic.
    val topics = Map("dianxin" -> 2)

    /**
      * Connect to Kafka and create the input DStream
      * (old high-level consumer API, coordinated through ZooKeeper).
      */
    val zk = "node2:2181,node3:2181,node4:2181"
    val groupId = "asdsafasd"

    // Kafka consumer parameters.
    // "smallest" = start from the earliest available offset when the group
    // has no committed offset yet.
    val kafkaParams = Map[String, String](
      "zookeeper.connect" -> zk,
      "group.id" -> groupId,
      "zookeeper.connection.timeout.ms" -> "10000",
      "auto.offset.reset" -> "smallest"
    )

    // DStream of (key, message) pairs from Kafka.
    val ds = KafkaUtils.createStream[String, String, StringDecoder, StringDecoder](
      ssc, kafkaParams, topics, StorageLevels.MEMORY_AND_DISK_SER)

    /**
      * foreachRDD runs on the driver once per batch, so the blacklist is
      * re-read and re-broadcast every batch, picking up changes to the file
      * between batches.
      */
    ds.foreachRDD(rdd => {

      println("driver端代码 每个batch运行一次")

      // Load the blacklist. Close the Source explicitly: the original leaked
      // one file handle per 5-second batch. A Set gives O(1) membership
      // checks instead of List's O(n) scan per record.
      val source = Source.fromFile("spark/data/blacklist.txt")
      val blackList =
        try source.getLines().toSet
        finally source.close()
      println("黑名单加载成功。。。。。")

      /**
        * Broadcast the blacklist so a large list is shipped once per
        * executor rather than once per task.
        */
      val blackListBro = sc.broadcast(blackList)

      // NOTE(review): this KEEPS records whose mdn IS in the blacklist
      // (i.e. it selects blacklisted traffic rather than dropping it).
      // Given the object name "BlackListFilter", confirm this direction
      // is intended; dropping would need `!contains`.
      rdd
        .map(_._2)
        .filter(line => {
          // mdn is the first comma-separated field of the message.
          val mdn = line.split(",")(0)
          blackListBro.value.contains(mdn)
        }).foreach(println)

      // foreach above is a blocking action, so the batch's jobs are done here;
      // release this batch's broadcast so executors don't accumulate stale
      // copies across batches.
      blackListBro.unpersist()
    })

    ssc.start()
    ssc.awaitTermination()
    ssc.stop()
  }
}
