package com.dxf.bigdata.D05_spark_again

import org.apache.spark.{SparkConf, SparkContext}

object Map {

  /** Minimal Spark demo of the RDD `map` transformation: doubles each element
    * of a small integer RDD and prints the collected results on the driver.
    *
    * NOTE(review): the object name `Map` shadows `scala.collection.Map` inside
    * this file — consider renaming (e.g. `MapDemo`) if the file grows.
    */
  def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("app")
    val sc = new SparkContext(sparkConf)

    // Always stop the context, even if the job throws — the original code
    // leaked the SparkContext by never calling sc.stop().
    try {
      // Source RDD split across 2 partitions.
      val numbers = sc.makeRDD(List(1, 23, 4, 5, 6), 2)

      // The println runs inside the task on the executors; on a real cluster
      // its output lands in executor logs, not the driver console.
      val doubled = numbers.map { x =>
        println("<<<<<<")
        x * 2
      }

      // collect() materializes the whole RDD on the driver — acceptable only
      // because the data set here is tiny.
      val results = doubled.collect()
      results.foreach(println)
    } finally {
      sc.stop()
    }
  }

}
