package com.doit.day05

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @Author:
 * @WX: 17710299606
 * @Tips: 学大数据 ,到多易教育
 * @DOC: https://blog.csdn.net/qq_37933018?spm=1000.2115.3001.5343
 * @Description:
 */
object Demo01Persist {

  /**
   * Demonstrates RDD persistence (caching).
   *
   * Without `persist()`, every Spark action re-executes the full lineage
   * (all four `map` stages, printing "处理N... ..." each time). After
   * `persist()` and one triggering action (`collect()`), subsequent actions
   * on `rdd4` read the cached partitions instead of recomputing the chain —
   * observable because the "处理N" printlns fire only once.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(this.getClass.getSimpleName)
    val sc = SparkContext.getOrCreate(conf)

    // FIX: ensure the SparkContext is always stopped; the original leaked it.
    try {
      // Single partition so the println ordering is deterministic in local mode.
      val rdd = sc.parallelize(List(1, 2), 1)

      // Four chained narrow transformations; each prints a marker so the
      // recomputation-vs-cache behavior is visible on the console.
      val rdd1 = rdd.map(e => {
        println("处理1... ...")
        e * 2
      })

      val rdd2 = rdd1.map(e => {
        println("处理2... ...")
        e * 2
      })

      val rdd3 = rdd2.map(e => {
        println("处理3... ...")
        e * 2
      })

      val rdd4 = rdd3.map(e => {
        println("处理4... ...")
        e * 2
      })

      // Lineage so far: f4(f3(f2(f1(e)))).
      // rdd4.cache() is shorthand for persist() — both default to
      // StorageLevel.MEMORY_ONLY (data cached in executor memory only).
      rdd4.persist()

      // First action: materializes rdd4 AND populates the cache.
      rdd4.collect()

      // These actions reuse the cached rdd4; the four "处理N" printlns
      // from the map chain do not run again.
      rdd4.map(e => (e, e)).groupBy(_._1).foreach(println)
      rdd4.map(e => (e, e, e)).groupBy(_._1).foreach(println)

      /*  rdd4.reduce(_+_)
          rdd4.reduce(_+_)
          rdd4.reduce(_+_)  */
    } finally {
      sc.stop() // release driver/executor resources
    }
  }

}
