package day03.acc

import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.apache.spark.util.{AccumulatorV2, LongAccumulator}
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable

/**
 * @author wsl
 * @version 2020-12-07
 *          Demonstrates a broadcast variable: a distributed, shared, read-only value.
 *          Without a broadcast variable, driver-side data is serialized and shipped
 *          to every task of every executor; a broadcast variable is shipped once per
 *          executor and shared by all tasks running on it.
 */
object BroadCast {
  def main(args: Array[String]): Unit = {
    // Local mode, one worker thread per core. App name fixed to match the demo
    // (was "cache" — a copy-paste leftover from the caching example).
    val conf: SparkConf = new SparkConf().setAppName("broadcast").setMaster("local[*]")
    val sc: SparkContext = new SparkContext(conf)

    // Sample log lines, spread across 4 partitions so multiple tasks exist.
    val rdd: RDD[String] = sc.makeRDD(List("WARN:Class Not Find", "INFO:Class Not Find", "DEBUG:Class Not Find"), 4)
    val str: String = "WARN"

    // Declare the broadcast variable on the driver.
    val bdStr: Broadcast[String] = sc.broadcast(str)

    rdd
      .filter {
        // log => log.contains(str)      // a plain captured variable is sent to every task
        log => log.contains(bdStr.value) // broadcast: one copy per executor, shared by its tasks
      }
      .foreach(println) // prints on the executors, not necessarily the driver console in cluster mode

    sc.stop()

  }
}
