package sparkCore

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel

object Demo03_persistence {

  /**
   * Demonstrates RDD persistence: caches the word-count pair RDD so it is not
   * recomputed across jobs, prints its storage level, runs the count, then
   * releases the cached blocks.
   *
   * @param sc the active SparkContext supplied by the caller
   */
  def wc(sc: SparkContext): Unit = {
    val dataRDD: RDD[String] = sc.textFile("D:\\data\\data_spark\\wordCount")
    val wordRDD: RDD[String] = dataRDD.flatMap(_.split(" "))
    val kvRDD: RDD[(String, Int)] = wordRDD.map((_, 1))

    // With the default storage level these two calls are fully equivalent;
    // to use any other level you must call persist(level) explicitly, e.g.
    // kvRDD.persist(StorageLevel.MEMORY_AND_DISK).
    kvRDD.persist()
//    kvRDD.cache()

    // println already invokes toString, so printing the level once suffices
    // (the original printed the identical string twice).
    println(kvRDD.getStorageLevel)

    val resultRDD: RDD[(String, Int)] = kvRDD.reduceByKey(_ + _)
    // foreach is an action executed for its side effect; binding its Unit
    // result to a val served no purpose.
    resultRDD.foreach(println)

    // Release the cached blocks once the job is done.
    kvRDD.unpersist()
  }

  /**
   * Demonstrates checkpointing: truncates the lineage of the pair RDD by
   * materializing it to the checkpoint directory when the first action runs.
   *
   * @param sc the active SparkContext supplied by the caller
   */
  def useCheckPoint(sc: SparkContext): Unit = {
    sc.setCheckpointDir("E://test")
    val dataRDD: RDD[String] = sc.textFile("D:\\data\\data_spark\\wordCount")
    val wordRDD: RDD[String] = dataRDD.flatMap(_.split(" "))
    val kvRDD: RDD[(String, Int)] = wordRDD.map((_, 1))

    // Spark's RDD.checkpoint() docs strongly recommend persisting the RDD
    // first: otherwise the checkpoint job recomputes the entire lineage a
    // second time after the action that triggers it.
    kvRDD.cache()
    kvRDD.checkpoint()

    val resultRDD: RDD[(String, Int)] = kvRDD.reduceByKey(_ + _)
    resultRDD.foreach(println)
  }

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("create")
    val sc = new SparkContext(conf)
    try {
      useCheckPoint(sc)
//      Thread.sleep(1000000000)
    } finally {
      // Always shut the context down, even if the job throws, so the local
      // cluster resources (and the web UI port) are released.
      sc.stop()
    }
  }

}
