package com.shujia.spark.core

import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Demo22Broadcast {
  /**
    * Demonstrates Spark broadcast variables:
    *   1. broadcasting a small lookup list used inside a `filter`;
    *   2. a map-side join ("map join"): collect a small table to the Driver,
    *      broadcast it as a Map, and join against it inside a `map`, avoiding
    *      a shuffle-based join entirely.
    *
    * Input files (CSV, id is the first column):
    *   data/students.txt, data/score.txt
    */
  def main(args: Array[String]): Unit = {
    // Local single-threaded master — demo only; set the master via
    // spark-submit in real deployments.
    val conf: SparkConf = new SparkConf().setAppName("Broadcast").setMaster("local")

    val sc = new SparkContext(conf)

    // Ensure the SparkContext is always released, even if a job fails.
    try {
      // Read the student table.
      val students: RDD[String] = sc.textFile("data/students.txt")

      /**
        * Broadcast variable basics.
        * Without broadcasting, `ids` would be serialized into every task
        * closure; broadcasting ships it once per Executor instead, which
        * matters for larger shared read-only data.
        */
      val ids = List("1500100010", "1500100013", "1500100015", "1500100016")

      // Broadcast the variable from the Driver.
      val broIds: Broadcast[List[String]] = sc.broadcast(ids)

      val filterRDD: RDD[String] = students.filter(student => {
        // Split out the id (first CSV column).
        val id: String = student.split(",")(0)

        // Read the broadcast value on the Executor side via .value.
        val value: List[String] = broIds.value

        value.contains(id)
      })
      filterRDD.foreach(println)

      /**
        * Practical use of broadcast variables: a map join.
        * Load the small table into memory and join on the map side,
        * so no shuffle is needed.
        */
      val scores: RDD[String] = sc.textFile("data/score.txt")

      /**
        * collect: pulls the RDD's data into Driver memory.
        * Safe only because the student table is small enough to fit.
        */
      val list: Array[String] = students.collect()

      // Index the small table by student id for O(1) lookups.
      val studentMap: Map[String, String] = list.map(stu => {
        val id: String = stu.split(",")(0)
        (id, stu)
      }).toMap

      // Broadcast the small table to all Executors.
      val broStudentMap: Broadcast[Map[String, String]] = sc.broadcast(studentMap)

      val stuScoInfo: RDD[String] = scores.map(sco => {
        val id: String = sco.split(",")(0)

        // Read the broadcast map on the Executor side.
        val value: Map[String, String] = broStudentMap.value

        // Look up the student's info by id; fall back to a default marker
        // when the score row has no matching student.
        val studentInfo: String = value.getOrElse(id, "默认值")

        studentInfo + "\t" + sco
      })
      stuScoInfo.foreach(println)
    } finally {
      // Release the context and its resources (fixes the original leak
      // where sc was never stopped).
      sc.stop()
    }
  }
}
