package day1

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Minimal Spark example: parallelize a local array, square each element,
 * and print the results from the driver.
 */
object RDDtest {
  def main(args: Array[String]): Unit = {
    // Windows-only workaround so Spark/Hadoop can find winutils.exe.
    System.setProperty("hadoop.home.dir", "D:\\hadoop")

    val sc = new SparkContext(new SparkConf().setAppName("yzy").setMaster("local"))
    try {
      val data = Array(1, 2, 3, 4, 5)
      val rdd = sc.parallelize(data)

      // BUG FIX: the original mapped with `i => println(i * i)`, producing an
      // RDD[Unit] (println returns Unit), so the foreach below printed
      // "() () () () ()". Keep the map pure and do the printing in the action.
      val squared = rdd.map(i => i * i)

      squared.foreach(i => print(i + " "))
      println()
    } finally {
      // Always release the SparkContext, even if the job throws.
      sc.stop()
    }
  }
}
