package com.doit.spark.day04

import com.doit.spark.day01.utils.SparkUtil
import org.apache.spark.rdd.RDD

/**
 * @DATE 2022/1/6/21:49
 * @Author MDK
 * @Version 2021.2.2
 * */
/**
 * Demonstrates a distributed join between two RDDs.
 *
 * Builds a keyed product RDD (keyed by product name) and a price RDD
 * (built from a local Map), joins them (which triggers a shuffle), and
 * prints both the raw join result and a flattened view of it.
 */
object C02_分布式闭包_Join {
  def main(args: Array[String]): Unit = {
    val sc = SparkUtil.getSc
    try {
      val list: List[(Int, String, Int)] = List[(Int, String, Int)]((1, "袜子", 88), (2, "裤子", 99), (3, "帽子", 66), (4, "褂子", 199))
      val rdd1: RDD[(Int, String, Int)] = sc.makeRDD(list, 2)
      // Re-key by product name so the RDD can participate in a join.
      val rdd2: RDD[(String, (Int, String, Int))] = rdd1.map(tp => (tp._2, tp))
      val mp: Map[String, Int] = Map[String, Int](("袜子", 100), ("裤子", 200), ("帽子", 300), ("褂子", 100))

      // A Map cannot be turned into an RDD directly; convert to a List first.
      val rdd3: RDD[(String, Int)] = sc.makeRDD(mp.toList, 2)
      // join introduces a shuffle; result shape: ("袜子", ((1, "袜子", 88), 100))
      val resRDD: RDD[(String, ((Int, String, Int), Int))] = rdd2.join(rdd3)
      // collect() first so the output is printed on the driver; a bare
      // rdd.foreach(println) would print on the executors in cluster mode.
      resRDD.collect().foreach(println)

      println("--------------------------------------------------------------")
      // Flatten the nested join tuple with pattern matching instead of
      // error-prone chained accessors like tp._2._1._1.
      val resRDD2 = resRDD.map { case (_, ((id, name, qty), price)) => (id, name, qty, price) }
      resRDD2.collect().foreach(println)
    } finally {
      // Always release the SparkContext, even if a job fails.
      sc.stop()
    }
  }
}
