package com.sunzm.spark.core

import com.alibaba.fastjson.JSON
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

/**
 * Demo of Spark broadcast variables: joins order records against a small
 * user table by broadcasting the user map to every executor, avoiding a
 * shuffle-based join.
 *
 * Expects JSON-lines input files with at least "userId" (orders) and
 * "userId"/"userName" (users) fields.
 */
object BroadCastDemo {
  def main(args: Array[String]): Unit = {
    // Set up the local Spark execution environment.
    val conf = new SparkConf().setAppName("RDD示例")
      .setMaster("local[*]")
    val sc = new SparkContext(conf)

    try {
      // Order data: the larger dataset, kept distributed.
      val orderRDD: RDD[String] = sc.textFile("data/spark/rdd/order.txt")
      // User data: the smaller dataset, suitable for broadcasting.
      val userRDD: RDD[String] = sc.textFile("data/spark/rdd/user.txt")

      // Parse each user JSON line into a (userId, userName) pair so the
      // collected result can become a lookup Map.
      val userMapRDD: RDD[(String, String)] = userRDD.map { line =>
        val userJson = JSON.parseObject(line)
        (userJson.getString("userId"), userJson.getString("userName"))
      }

      // Collect the small user dataset to the driver, then broadcast it so
      // each executor receives one read-only copy instead of one per task.
      val userMap: Map[String, String] = userMapRDD.collect().toMap
      val userMapBC: Broadcast[Map[String, String]] = sc.broadcast(userMap)

      // Enrich each order record with the user's name via the broadcast map.
      val resultRDD: RDD[String] = orderRDD.map { line =>
        val orderJson = JSON.parseObject(line)
        val userId = orderJson.getString("userId")

        // Read the broadcast value on the executor side.
        val bdUserMap: Map[String, String] = userMapBC.value

        // Fall back to "未知用户" ("unknown user") when the id is absent.
        val userName = bdUserMap.getOrElse(userId, "未知用户")

        orderJson.put("userName", userName)
        orderJson.toJSONString
      }

      // NOTE: foreach runs on the executors; this only prints to this console
      // because the master is local[*]. On a real cluster, collect() first.
      resultRDD.foreach(line => {
        println(line)
      })

      println(resultRDD.partitions.length)
    } finally {
      // Always release SparkContext resources, even if the job above fails.
      sc.stop()
    }
  }
}
