package com.atguigu.bigdata.spark

import org.apache.spark.{Partitioner, SparkConf, SparkContext}

// Demonstrates RDD checkpointing: truncating an RDD's lineage by saving it to reliable storage
object Spark02_CheckPoint12 {

  /**
   * Entry point: builds a small word-count-style pipeline and checkpoints the
   * reduced RDD so its lineage is truncated after the first action runs.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    // Configure the Spark runtime: local mode using all available cores.
    val config: SparkConf = new SparkConf().setMaster("local[*]").setAppName("wordCount")
    // Create the Spark context (driver-side entry point to the cluster).
    val sc = new SparkContext(config)
    // Directory where checkpoint data is materialized. In production this
    // should be a fault-tolerant store such as HDFS, not a local relative path.
    sc.setCheckpointDir("cp")

    val rdd = sc.makeRDD(List(1, 2, 3, 4, 1, 2, 3))

    // Pair each element with 1, then sum counts per key.
    val mapRDD = rdd.map((_, 1))
    val reduceRDD = mapRDD.reduceByKey(_ + _)

    // Persist before checkpointing: checkpoint() launches a separate job after
    // the first action, and without cache() that job would recompute the whole
    // lineage from scratch (as recommended by the RDD.checkpoint API docs).
    reduceRDD.cache()
    reduceRDD.checkpoint()
    // First action triggers both the computation and the checkpoint job.
    reduceRDD.foreach(println)
    // After checkpointing, the debug string shows the lineage cut at the
    // CheckpointRDD instead of the full map/reduce chain.
    println(reduceRDD.toDebugString)
    sc.stop()
  }
}
