package com.shujia.spark

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel

object Demo18Checkpoint {

  /**
    * Demonstrates RDD checkpointing: `checkpoint()` writes the RDD's data to the
    * checkpoint directory (typically HDFS), effectively taking a snapshot and
    * truncating the lineage so later jobs read the snapshot instead of
    * recomputing from the source file.
    */
  def main(args: Array[String]): Unit = {

    // NOTE(review): app name "map" looks like a copy-paste leftover from another demo.
    val conf: SparkConf = new SparkConf().setMaster("local[8]").setAppName("map")
    val sc: SparkContext = new SparkContext(conf)

    // Directory where checkpointed RDD data is written.
    // A local path is fine for this demo; production would use an HDFS path.
    sc.setCheckpointDir("spark/data/checkpoint")

    val students: RDD[String] = sc.textFile("spark/data/students.txt")

    // Identity map with a side-effecting println, so the console shows how many
    // times the upstream lineage is actually (re)computed.
    val studentsRDD: RDD[String] = students.map(line => {
      println("读取数据过程")
      line
    })

    // cache() before checkpoint() so the upstream logic runs only once:
    // without the cache, the separate job launched to materialize the
    // checkpoint would recompute the whole lineage a second time.
    studentsRDD.cache()
    studentsRDD.checkpoint()

    // First action: word count keyed on column 4.
    // Triggers the initial computation AND the checkpoint job.
    studentsRDD
      .map(line => (line.split(",")(4), 1))
      .reduceByKey(_ + _)
      .foreach(println)

    // Second action: count keyed on column 3.
    // Served from the cache/checkpoint — the "读取数据过程" println should not repeat.
    studentsRDD
      .map(line => (line.split(",")(3), 1))
      .reduceByKey(_ + _)
      .foreach(println)

    // Keep the driver alive so the Spark web UI (http://localhost:4040) can be
    // inspected. Sleep inside the loop instead of busy-spinning, which would
    // otherwise pin a CPU core doing nothing.
    while (true) {
      Thread.sleep(10000)
    }
  }
}
