package chapter06

import org.apache.spark.{SparkConf, SparkContext}

/**
 * @author: 余辉
 * @blog: https://blog.csdn.net/silentwolfyh
 * descriptions:
 * date: 2024 - 08 - 31 9:09 下午
 */
/**
 * Demonstrates Spark broadcast variables: a read-only lookup table is shipped
 * once to each executor instead of being serialized with every task closure.
 */
object BroadcastExample {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("BroadcastExample").setMaster("local")
    val sc = new SparkContext(conf)

    try {
      // Pretend this is a large lookup table we don't want copied per task.
      val lookupTable = Map("key1" -> "value1", "key2" -> "value2", "key3" -> "value3")

      // Broadcast the lookup table to all executors.
      val broadcastVar = sc.broadcast(lookupTable)

      val data = sc.parallelize(Seq(("key1", 1), ("key2", 2), ("key3", 3)))

      // Enrich each record via the broadcast table. getOrElse avoids a
      // NoSuchElementException should a key ever be missing from the table.
      val result = data.map { case (key, value) =>
        (key, broadcastVar.value.getOrElse(key, "N/A"), value)
      }

      // Collect to the driver and print.
      result.collect().foreach(println)

      // Release the broadcast's blocks on the executors once it is no longer needed.
      broadcastVar.unpersist()

      /** *
       * Output:
       * (key1,value1,1)
       * (key2,value2,2)
       * (key3,value3,3)
       */
    } finally {
      // Always stop the SparkContext to release cluster resources.
      sc.stop()
    }
  }
}