package com.shujia.core

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates Spark broadcast variables on RDDs:
 *   1. Filtering with a plain Driver-side list (the list is serialized into
 *      every task closure and re-sent per task).
 *   2. The same filter with a broadcast variable (shipped to each Executor
 *      once and shared by all its tasks).
 *   3. A map-side join ("map join") built on a broadcast variable — only
 *      suitable for joining a large table with a small table that fits in memory.
 */
object Demo13BroadCast {
  def main(args: Array[String]): Unit = {

    // 1. Build the Spark configuration.
    val conf = new SparkConf()
    conf.setMaster("local") // run locally
    conf.setAppName("Demo13BroadCast") // application name shown in the Spark UI

    // 2. Create the Spark context.
    val sc = new SparkContext(conf)

    val studentRDD = sc.textFile("data/student.txt")

    // Students whose gender (column index 3) is "男" (male).
    val filterRDD = studentRDD.filter(line => {
      val gender = line.split(",")(3)

      "男".equals(gender)
    })
    filterRDD.foreach(println)

    // Filter by class (column index 4) using a plain Driver-side list; the
    // list is captured in the task closure and serialized with every task.
    val clazzs = List("文科一班", "文科二班", "文科三班")
    studentRDD.filter(line => {
      val clazz = line.split(",")(4)

      clazzs.contains(clazz)
    })
      .foreach(println)

    // Same filter, but the list is broadcast from the Driver so each
    // Executor receives a single copy instead of one copy per task.
    val broClass = sc.broadcast(clazzs)
    studentRDD.filter(line => {
      val clazz = line.split(",")(4)
      // Read the broadcast value inside the Executor.
      val clazzss = broClass.value
      clazzss.contains(clazz)
    })
      // An action is required — RDDs are lazy, so without it the broadcast
      // example above would never actually run.
      .foreach(println)

    // Map join via a broadcast variable: the join happens on the map side.
    // Only applicable when the small table fits in Driver/Executor memory.
    val scoreRDD = sc.textFile("data/score.txt")

    // Collect the small table to the Driver as a Map keyed by student id.
    val stuMap = studentRDD.map(line => (line.split(",")(0), line)).collectAsMap()

    // Broadcast the small table to the Executors.
    val stuBro = sc.broadcast(stuMap)

    scoreRDD.map(line => {
      val s_id = line.split(",")(0)

      val bro = stuBro.value

      // Look up the student row by id; fall back to "null" when absent.
      val stuInfo = bro.getOrElse(s_id, "null")

      line + "\t" + stuInfo
    }).foreach(println)

    // Release the SparkContext's resources.
    sc.stop()
  }

}
