package day1

import org.apache.log4j.{Level, Logger}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.{SparkConf, SparkContext}

object RDDStorage {

  /**
   * Demonstrates RDD caching behavior.
   *
   * Without `persist()`, every action re-runs the RDD's full lineage from the
   * source data; with `persist(StorageLevel.MEMORY_ONLY)`, subsequent actions
   * reuse the cached partitions instead of recomputing them.
   */
  def main(args: Array[String]): Unit = {

    // Windows-only workaround so Spark/Hadoop can locate winutils.exe.
    System.setProperty("hadoop.home.dir", "D:\\hadoop")

    // Silence log4j BEFORE creating the context so startup noise is suppressed too
    // (previously this ran after the SparkContext was already up).
    Logger.getLogger("org").setLevel(Level.OFF)

    val sc = new SparkContext(new SparkConf().setAppName("Basic").setMaster("local"))

    try {
      val arr1 = Array(1, 2, 3, 4, 5, 6, 7, 8, 9)
      val rdd1 = sc.parallelize(arr1)
      val rdd2 = rdd1.map(i => i)

      // After an action completes, the intermediate data of an UNCACHED RDD is
      // released, so each of the two actions below recomputes rdd1's lineage
      // from scratch (the println side effect runs twice per element overall).
      rdd1.map { i =>
        println(s"test for rdd1: $i")
      }.count()

      rdd1.map { i =>
        println(s"test for rdd1: $i")
      }.count()

      println("*******************************")

      // Cache rdd2 so the two actions below share one computation. This line is
      // the point of the demo — it was previously commented out, which also left
      // the StorageLevel import unused.
      rdd2.persist(StorageLevel.MEMORY_ONLY)
      rdd2.count()
      rdd2.foreach(println)
    } finally {
      // Always release the SparkContext (and its local executor threads),
      // even if an action above throws.
      sc.stop()
    }
  }
}
