package com.atbeijing.bigdata.spark.core.rdd.persist

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Spark01_RDD_Persist {

    /**
     * Demo: two separate action pipelines built from the same kind of source data.
     *
     * The println inside the map executes once per element per action, because
     * without persist()/cache() each action (collect) recomputes the whole
     * lineage from the source RDD. Running this shows the map-side println
     * firing for both the reduceByKey job and the groupByKey job.
     */
    def main(args: Array[String]): Unit = {

        val conf = new SparkConf().setMaster("local").setAppName("Persist")
        val sc = new SparkContext(conf)

        // --- Pipeline 1: word count via reduceByKey ---
        val rdd = sc.makeRDD(
            List("Hello Scala")
        )
        val rdd1 = rdd.flatMap(_.split(" "))
        val rdd2 = rdd1.map(
            s => {
                // Side-effecting print to make recomputation visible.
                println(s)
                (s, 1)
            })

        val rdd3 = rdd2.reduceByKey(_+_)
        rdd3.collect().foreach(println)
        println("**************************************")

        // --- Pipeline 2: same words grouped via groupByKey ---
        val rdd00 = sc.makeRDD(
            List("Hello Scala")
        )

        val rdd11 = rdd00.flatMap(_.split(" "))
        // BUG FIX: original code mapped over rdd1 (pipeline 1), leaving rdd11
        // unused; the second pipeline must build on its own source, rdd11.
        val rdd22 = rdd11.map(
            s => {
                println(s)
                (s, 1)
            })
        val rdd33 = rdd22.groupByKey()
        rdd33.collect().foreach(println)

        sc.stop()
    }
}
