package com.doit.day04

import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.immutable
import scala.collection.immutable.HashMap

/**
 * @Author:
 * @WX: 17710299606
 * @Tips: 学大数据 ,到多易教育
 * @DOC: https://blog.csdn.net/qq_37933018?spm=1000.2115.3001.5343
 * @Description:
 */
object Demo04BroadCast {

  /**
   * Demonstrates a Spark broadcast variable: a small driver-side lookup map
   * is broadcast to every executor once, instead of being serialized into
   * each task closure.
   *
   * Reads `data/orders/order.csv`, joins field 4 (uid) against the broadcast
   * map, and prints each enriched record.
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(this.getClass.getSimpleName)
    val sc = SparkContext.getOrCreate(conf)

    try {
      // Small uid -> name lookup table; fits comfortably in driver memory,
      // which is the precondition for broadcasting it.
      val uidToName = Map[String, String](
        "uid01" -> "小乔",
        "uid02" -> "大乔",
        "uid03" -> "王昭君",
        "uid04" -> "貂蝉",
        "uid05" -> "杨贵妃"
      )
      // Broadcast: each executor receives exactly one copy of the map,
      // rather than one copy per task via closure capture.
      val bc = sc.broadcast(uidToName)

      sc.textFile("data/orders/order.csv").map(line => {
        val arr = line.split(",")
        // NOTE(review): assumes every CSV row has at least 5 comma-separated
        // fields and that field 1 parses as a number — confirm input format.
        val uid = arr(4)
        // Closure-variable alternative (re-serialized with every task):
        //   val name = uidToName.getOrElse(uid, "null")
        // Broadcast variable: value fetched once per executor.
        val name = bc.value.getOrElse(uid, "null")

        (arr(0), arr(1).toDouble, arr(2), arr(3), arr(4), name)
      }).foreach(println)
    } finally {
      // Fix: the original never stopped the SparkContext, leaking the
      // context on exit or failure. Always release it.
      sc.stop()
    }
  }

}
