package org.hadoop.spark
import org.apache.spark.{SparkConf, SparkContext}
object ShareVar {
  /** Demonstrates sharing a read-only lookup table with Spark tasks.
    *
    * The lookup Map is distributed to executors as a broadcast variable, so it
    * is shipped to each worker node once instead of being serialized into the
    * closure of every individual task.
    */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
    conf.setMaster("local[*]")
    conf.setAppName("ShareVariable")
    val sc: SparkContext = new SparkContext(conf)
    sc.setLogLevel("WARN")
    try {
      // Read-only lookup table: translates an integer key to its label.
      val lookup = Map(1 -> "a", 2 -> "b", 3 -> "c", 4 -> "d")
      // Broadcast it once per executor rather than once per task closure —
      // this is the "shared variable" the app name refers to.
      val lookupBc = sc.broadcast(lookup)
      val rdd = sc.parallelize(Seq(1, 2, 4))
      // Map each id through the broadcast table; every key here is present,
      // so direct apply cannot throw for this input.
      val rdd2 = rdd.map(lookupBc.value(_))
      println(rdd2.collect().toSet) // prints Set(a, b, d)
    } finally {
      // Release the SparkContext even if the job above fails.
      sc.stop()
    }
  }
}
