package com.bigdata.core.action

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo of collectAsMap: collects a (K, V)-format RDD back to the driver
 * for use as a Map.
 */
object Demo10_collectAsMap {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local").setAppName("collectAsMap")
    val sc = new SparkContext(conf)
    // Spark upper-cases the level internally, so "error" resolves to ERROR.
    sc.setLogLevel("error")

    try {
      val infos: RDD[(String, Double)] = sc.parallelize(List[(String, Double)](("zhangsan", 78.4),
        ("lisi", 32.6), ("wangwu", 90.9)))

      // collectAsMap pulls the entire RDD back to the driver; on a large
      // dataset this can OOM the driver (especially when it runs in client mode).
      // NOTE: duplicate keys are silently collapsed — the last value wins.
      val result: collection.Map[String, Double] = infos.collectAsMap()
      result.foreach(println)
    } finally {
      // Always release the SparkContext so the application shuts down cleanly,
      // even if the job above throws.
      sc.stop()
    }
  }
}
