package com.zyh.day03

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates how RDD `checkpoint()` interacts with `cache()`.
 *
 * Expected observation: without `cache()`, the first action triggers a *second*
 * recomputation of the lineage just to write the checkpoint, so the timestamps
 * embedded in the records would differ between the printed output and the
 * checkpointed data. With `cache()` the checkpoint writer reads the cached
 * partitions instead, so all three collections print identical timestamps.
 */
object CheckPointTest {
  def main(args: Array[String]): Unit = {
    // Run as "root" so the checkpoint directory on HDFS is writable.
    System.setProperty("HADOOP_USER_NAME", "root")
    val conf: SparkConf = new SparkConf()
      .setAppName("cpt")
      .setMaster("local[*]")
    val sc = new SparkContext(conf)

    // Ensure the SparkContext is stopped even if HDFS is unreachable or an
    // action fails mid-run; otherwise the context (and its UI/threads) leaks.
    try {
      // Checkpointing requires a reliable storage location to be set first.
      sc.setCheckpointDir("hdfs://hadoop10:9000/checkpoint")

      val rdd1: RDD[Int] = sc.makeRDD(1 to 10, 2)
      // Embed a wall-clock timestamp so recomputation of the lineage is visible:
      // a recomputed partition would produce different strings.
      val rdd2: RDD[String] = rdd1.map(item => item + "-" + System.currentTimeMillis())

      // Order matters: mark cache() before checkpoint() so the checkpoint job
      // reads the cached data instead of recomputing the whole lineage.
      // Both are lazy; nothing happens until the first action below.
      rdd2.cache()
      rdd2.checkpoint()

      // First action: computes rdd2, populates the cache, and triggers the
      // checkpoint job, which persists the (cached) result to HDFS without
      // recomputing. Subsequent actions read from the checkpoint/cache, so
      // the timestamps stay identical across all three printouts.
      rdd2.foreach(println)

      Thread.sleep(3000)
      rdd2.foreach(println)

      Thread.sleep(3000)
      rdd2.foreach(println)
    } finally {
      sc.stop()
    }
  }
}
