package com.atbeijing.bigdata.spark.core.rdd.operator.dep

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

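/**
 * Demonstrates RDD lineage: prints toDebugString after each transformation
 * to show how narrow and wide dependencies accumulate in a word-count pipeline.
 */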
object Spark01_RDD_Dep {

    def main(args: Array[String]): Unit = {

        val conf = new SparkConf().setMaster("local[*]").setAppName("RDD_Dep")
        val sc = new SparkContext(conf)

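        // Build the source RDD from a text file and print its lineage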
        val rdd = sc.textFile("data/word.txt")
        println(rdd.toDebugString)
        println("**********************************")
        val rdd1 = rdd.map((_, 1))
        println(rdd1.toDebugString)
        println("**********************************")
        val rdd2 = rdd1.reduceByKey(_ + _)
        println(rdd2.toDebugString)
        println("**********************************")
        rdd2.collect()
        sc.stop()

    }
}
