package com.shujia.spark.core

import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Demo: Spark broadcast variables.
  *
  * Filters a student file twice by a small ID whitelist — first by capturing
  * the list directly in the closure (shipped with every task), then via a
  * broadcast variable (shipped once per executor) — to contrast the two.
  */
object Demo21Bro {
  def main(args: Array[String]): Unit = {

    // Environment configuration object.
    val conf = new SparkConf()

    // Set the job name and run locally with a single thread.
    // NOTE(review): "pi" looks copy-pasted from another demo; consider renaming.
    conf.setAppName("pi")
    conf.setMaster("local")

    // Create the Spark environment — the entry point for writing Spark code.
    val sc = new SparkContext(conf)

    try {
      val studentsRDD: RDD[String] = sc.textFile("data/students.txt")

      val ids = List("1500100988", "1500100966", "1500100983", "1500100961")

      // Without a broadcast variable: `ids` is captured by the closure and
      // serialized into EVERY task that runs this filter.
      val filterRDD: RDD[String] = studentsRDD.filter(line => {
        // First comma-separated column is the student ID.
        val id: String = line.split(",")(0)
        ids.contains(id)
      })

      filterRDD.foreach(println)

      /**
        * Broadcast variable: the same list, but distributed once per executor
        * instead of once per task — cheaper for large read-only lookup data.
        */
      val ids1 = List("1500100988", "1500100966", "1500100983", "1500100961")

      // 1. Broadcast the variable on the driver side.
      val broIds: Broadcast[List[String]] = sc.broadcast(ids1)

      val filterRDD1: RDD[String] = studentsRDD.filter(line => {
        val id: String = line.split(",")(0)

        // 2. Read the broadcast value inside the operator (executor side).
        broIds.value.contains(id)
      })

      filterRDD1.foreach(println)

      /**
        * Caveats when writing RDD code:
        * 1. RDDs cannot be nested — do not use an RDD inside an operator.
        * 2. The SparkContext cannot be used inside an operator.
        */

      /*filterRDD1.foreach(i => {
        filterRDD1.foreach(j => {
          println(i + j)
        })
      })*/

      /*
          filterRDD1.foreach(i => {
            sc.textFile("data/students.txt")
          })
      */
    } finally {
      // Release the SparkContext so the local Spark runtime shuts down cleanly
      // even if a job above fails (the original leaked it).
      sc.stop()
    }
  }

}
