package com.shujia.spark

import org.apache.spark.broadcast.Broadcast
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

object Demo19Broadcast {

  /**
    * Demonstrates two closure-related behaviors in Spark:
    *
    *  1. Broadcast variables: a read-only Driver-side value shipped once to
    *     each Executor (instead of once per task) and read via `.value`.
    *  2. The closure-capture pitfall: mutating an ordinary Driver variable
    *     inside an RDD action does NOT propagate back to the Driver, because
    *     Executors run in separate JVM processes.
    */
  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf().setMaster("local[8]").setAppName("map")
    val sc: SparkContext = new SparkContext(conf)

    // Each line is a comma-separated student record.
    val students: RDD[String] = sc.textFile("spark/data/students.txt")

    println("filter之前")

    // Keep only male students: gender is the 4th comma-separated field.
    val filterRDD: RDD[String] = students.filter(line => {

      val bool: Boolean = "男".equals(line.split(",")(3))

      bool
    })

    println("filter之后")

    filterRDD.foreach(println)


    // A plain Scala collection, living on the Driver.
    val list: List[String] = List("文科一班", "文科二班", "文科三班")

    // Define the broadcast variable on the Driver side. Spark sends one
    // read-only copy to each Executor rather than serializing the list
    // into every task closure.
    val broList: Broadcast[List[String]] = sc.broadcast(list)


    // Keep students whose class (5th field) is in the broadcast list.
    val clazzSRDD: RDD[String] = students.filter(line => {

      val clazz: String = line.split(",")(4)

      // Read the broadcast value on the Executor side via `.value`.
      val value: List[String] = broList.value

      value.contains(clazz)

    })


    clazzSRDD.foreach(println)


    val RDD1: RDD[Int] = sc.parallelize(List(1, 2, 3, 4, 5, 6, 7))


    var sum: Int = 0

    RDD1.foreach(line => {

      /**
        * Any modification an Executor makes to an ordinary captured variable
        * is NOT reflected back on the Driver, because the Executors and the
        * Driver are different JVM processes. Each task mutates its own
        * deserialized copy of `sum`. Use a Spark Accumulator when the Driver
        * needs to observe such a result.
        */

      sum += 1
      println(line)

    })

    // Still prints "sum:0" — the increments above happened in Executor JVMs
    // (this is the point of the demo, not a bug).
    println("sum:" + sum)

    // FIX: the original never stopped the context; release the underlying
    // resources (threads, UI server, shuffle dirs) before the JVM exits.
    sc.stop()
  }
}
