package sparkcore.day4.lesson01

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by Administrator on 2018/4/26.
  */
/**
  * Demonstrates a map-side (broadcast) join: the small "class" dataset is
  * collected to the driver and broadcast to all executors, so joining with
  * the large "names" dataset needs no shuffle.
  */
object MapJoinTest {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName("test")
    val sc = new SparkContext(conf)

    // (classID, studentName) pairs — a classID may repeat (one-to-many join).
    val names = Array(("001", "袁承志"), ("001", "袁将军"), ("002", "金蛇郎君"))
    // (classID, className) lookup data — small enough to broadcast.
    val clazz = Array(("001", "一班"), ("002", "二班"))

    val namesRDD = sc.parallelize(names)
    val clazzRDD = sc.parallelize(clazz)

    // A regular RDD join would shuffle both sides:
    // val join: RDD[(String, (String, String))] = namesRDD.join(clazzRDD)
    // Broadcasting the small side avoids the shuffle entirely.

    // Build the lookup Map ONCE on the driver and broadcast the Map itself.
    // (The original broadcast the Array and called .toMap inside the map()
    // lambda, rebuilding the Map for every single record.)
    val clazzMap = clazzRDD.collect().toMap
    val broadcastClazzs = sc.broadcast(clazzMap)

    namesRDD.map { case (classID, name) =>
      // Unknown classIDs fall back to the sentinel class name "000班".
      val className = broadcastClazzs.value.getOrElse(classID, "000班")
      (classID, (name, className))
    }.foreach { tuple =>
      println("班级：" + tuple._1 + " 姓名：" + tuple._2._1 + " 班级名：" + tuple._2._2)
    }

    sc.stop()
  }

}
