package com.zhang.spark_1.spark_core.persist

import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @title: RDD persistence demo: reuse an RDD across multiple actions with persist()/cache()
 * @author: zhang
 * @date: 2021/12/8 14:47
 */
object Spark01_Persist {

  def main(args: Array[String]): Unit = {
    //Create the Spark connection (SparkConf + SparkContext)
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("operator")
    val sc: SparkContext = new SparkContext(conf)

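    // Build an RDD from an in-memory collection, then split each line into words.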
    val rdd: RDD[String] = sc.makeRDD(List("hello scala", "hello spark"))
    val flatMapRdd: RDD[String] = rdd.flatMap(_.split(" "))

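    // The println inside map is a marker: without persist, it would run again for every action that re-evaluates this RDD.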
    val mapRDD: RDD[(String, Int)] = flatMapRdd.map(word => {
      println("------------")
      (word, 1)
    })
    // cache() is shorthand for persist(StorageLevel.MEMORY_ONLY)
    //mapRDD.cache()
    // persist() defaults to MEMORY_ONLY; caching here lets the second action reuse the data instead of recomputing the lineage.
    mapRDD.persist()
    // First action: runs the full lineage (the marker prints) and fills the cache.
    mapRDD.reduceByKey(_ + _).collect().foreach(println)
    println("=======================")
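    // Second action: groupByKey reads the persisted data, so the marker is not printed again.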
    val groupRDD: RDD[(String, Iterable[Int])] = mapRDD.groupByKey()
    groupRDD.collect().foreach(println)

    sc.stop()
  }
}
