package com.atguigu0.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @description: Spark RDD exercise demonstrating cache() vs checkpoint() for reusing computed RDDs.
 * @time: 2020/6/11 14:41
 * @author: baojinlong
 **/
object RddExercise02 {
  /**
   * Entry point. Runs two small demos on a local Spark cluster:
   * 1) cache(): marks an RDD for in-memory reuse so repeated actions return identical data;
   * 2) checkpoint(): persists an RDD to reliable storage (triggers an extra job).
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val sparkConf: SparkConf = new SparkConf().setAppName("myWordCount").setMaster("local[*]")
    // Create the SparkContext; transformations are lazy and do not trigger a job by themselves.
    val sc: SparkContext = new SparkContext(sparkConf)

    println("1:使用cache作为缓存")
    val value: RDD[String] = sc.parallelize(Array("atguigu"))
    // Each recomputation appends a fresh timestamp, so an UNCACHED re-evaluation
    // would produce a different string every time — which is what makes caching visible.
    val value1: RDD[String] = value.map(_ + System.currentTimeMillis())
    // FIX: mark for caching BEFORE the first action so that the first collect() populates
    // the cache. The original called cache() after collect(), so the first result was
    // never cached and the demo's "results are identical" claim did not actually hold.
    value1.cache()
    // FIX: use mkString — the original passed (String, Array) as a tuple to println,
    // which prints the array's unreadable default toString.
    println("收集起来结果: " + value1.collect().mkString(", "))
    println("从缓存中直接收集后每次结果是一样的: " + value1.collect().mkString(", "))

    println("2:使用checkPoint来作为缓存,注意checkPoint会重新触发一个任务,一般常用是先cache然后再checkpoint:value3.cache.checkpoint")
    // NOTE(review): Windows-style path — adjust for the environment if needed.
    sc.setCheckpointDir("E:/test-data/input")
    val value2: RDD[String] = value.map(_ + System.currentTimeMillis())
    // FIX: cache before checkpoint, as the message above recommends — the extra job
    // launched by checkpointing then reuses the cached partitions instead of
    // recomputing the lineage (which would yield a different timestamp).
    value2.cache()
    value2.checkpoint()
    // checkpoint() is lazy too; an action is required to materialize it.
    value2.collect()

    sc.stop()
    println("ok-end")
  }
}
