package com.spark.WorCount

import org.apache.spark.{SparkConf, SparkContext}

object CheckpointByScala {
  /**
   * Demonstrates reliable RDD checkpointing: persists the RDD's data to an
   * HDFS checkpoint directory so its lineage can be truncated.
   *
   * Order matters: the checkpoint directory must be set before `checkpoint()`
   * is called, and the checkpoint itself is only materialized when the first
   * action (`foreach` below) runs.
   */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()
      .setAppName("CheckpointByScala") // was "TopNByScala" — copy-paste from another example
      .setMaster("local")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .registerKryoClasses(Array(classOf[Person]))
    val sc = new SparkContext(sparkConf)
    try {
      // 1. Set the checkpoint save directory BEFORE checkpointing any RDD.
      sc.setCheckpointDir("hdfs://hadoop101:9000/chk001")
      val fileRDD = sc.textFile("datas/video_info.log")
      // 2. Cache first: checkpointing runs the whole lineage again in a
      //    separate job, so without cache() the input file is read twice.
      fileRDD.cache()
      fileRDD.checkpoint()
      // 3. First action triggers both the computation and the checkpoint write.
      fileRDD.foreach(println)
    } finally {
      // Always release the SparkContext, even if the job above fails.
      sc.stop()
    }
  }

}

case class Person(name:String,age:Int) extends Serializable
