package com.li.spark

import org.apache.spark.storage.StorageLevel
import org.apache.spark.{SparkConf, SparkContext}

object CheckpointOp {

  /**
   * Word-count batch job that checkpoints its input RDD.
   *
   * Usage: CheckpointOp <outputPath>
   *
   * Exits with status 100 when no output path argument is supplied.
   * The checkpoint directory and input path are hard-coded HDFS locations.
   */
  def main(args: Array[String]): Unit = {
    // Validate arguments BEFORE allocating any cluster resources; the original
    // created the SparkContext first and could exit without stopping it.
    if (args.length == 0) {
      System.err.println("Usage: CheckpointOp <outputPath>")
      System.exit(100)
    }
    val outputPath = args(0)

    val sc: SparkContext = getSparkContext
    try {
      // Checkpoint data must live on reliable storage (e.g. HDFS).
      sc.setCheckpointDir("hdfs://bigdata01:9000/chk001")

      // Persist to disk before checkpointing: checkpoint() re-runs the lineage
      // after the first action unless the data is already materialized.
      val dataRDD = sc
        .textFile("hdfs://bigdata01:9000/hello_10000000.dat")
        .persist(StorageLevel.DISK_ONLY)

      // Mark the RDD for checkpointing; the actual write happens lazily,
      // after the first action below runs.
      dataRDD.checkpoint()

      // Classic word count, written to the caller-supplied output path.
      dataRDD
        .flatMap(_.split(" "))
        .map((_, 1))
        .reduceByKey(_ + _)
        .saveAsTextFile(outputPath)
    } finally {
      // Always release cluster resources, even if the job fails.
      sc.stop()
    }
  }

  /**
   * Builds a SparkContext with the application name set.
   * The master URL is expected to come from spark-submit / cluster config.
   */
  private def getSparkContext: SparkContext = {
    val conf = new SparkConf()
    conf.setAppName("CheckpointOp")
    new SparkContext(conf)
  }
}
