package cn.aijson.demo.rdd

import org.apache.commons.lang3.StringUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
import org.apache.spark.{SparkConf, SparkContext}

object RDDCacheCheckpoint {

  /**
   * Demonstrates RDD persistence (cache/persist) and checkpointing using a
   * word-count job over `data/input/words.txt`.
   *
   * Ordering rules illustrated here:
   *  - the checkpoint directory must be set (setCheckpointDir) before checkpoint()
   *  - persist() and checkpoint() are both lazy: an action must run afterwards,
   *    otherwise neither the cache nor the checkpoint is ever materialized
   */
  def main(args: Array[String]): Unit = {
    // 1. Create the local execution environment.
    val conf: SparkConf = new SparkConf().setAppName("spark").setMaster("local[*]")
    val sc: SparkContext = new SparkContext(conf)
    sc.setLogLevel("WARN")

    // 2. Read the file into a distributed RDD and run a partitioned word count.
    val lines: RDD[String] = sc.textFile("data/input/words.txt")
    val result: RDD[(String, Int)] = lines
      // BUGFIX: was isNoneBlank(_) — that is the varargs "none of these are blank"
      // overload; the intended single-argument check is isNotBlank.
      .filter(StringUtils.isNotBlank(_))
      .flatMap(_.split(" "))
      .map((_, 1))
      .reduceByKey(_ + _)

    // BUGFIX: println("…", result) relied on deprecated auto-tupling and printed
    // a tuple literal; use string interpolation instead.
    println(s"%%%%%%%%%% persisting the result RDD: $result")
    //result.cache()   // delegates to persist()
    //result.persist() // delegates to persist(StorageLevel.MEMORY_ONLY)
    result.persist(StorageLevel.MEMORY_AND_DISK)

    println("%%%%%%%%%% setting checkpoint")
    // Must be configured before checkpoint() is invoked on the RDD.
    sc.setCheckpointDir("./data/checkpoint")
    result.checkpoint()

    // BUGFIX: an action is required — persist() and checkpoint() only take
    // effect when the RDD is actually computed. The original code never ran
    // an action, so nothing was ever cached or checkpointed.
    result.foreach(println)

    println("%%%%%%%%%% clearing the persisted cache")
    result.unpersist()

    // Release the SparkContext and its resources (was missing).
    sc.stop()
  }
}
