package com.lb.bigdata.spark.core.p2

import com.lb.bigdata.spark.core.p2._02CombineByKeyOps.Student
import org.apache.hadoop.io.Text
import org.apache.hadoop.mapred.TextOutputFormat
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo of Spark RDD action operators:
 *   count            - number of records in the current RDD
 *   foreach          - iterate over elements
 *   take
 *   --------------------
 *   collect
 *   reduce
 *   countByKey
 *   saveAsXxx
 *   foreachPartition
 */
object _04ActionOps {
    /**
     * Entry point demonstrating the action operators collect, reduce,
     * countByKey and saveAsHadoopFile on a small in-memory data set.
     *
     * @param args optional; args(0) overrides the saveAsHadoopFile output
     *             path (defaults to "D:/province.txt" when absent, so the
     *             original no-argument behavior is unchanged)
     */
    def main(args: Array[String]): Unit = {
        val conf = new SparkConf()
                .setMaster("local[*]")
                .setAppName(s"${_04ActionOps.getClass.getSimpleName}")
        val sc = new SparkContext(conf)

        // Sample pair data: Student(name, age, province), spread over 2 partitions.
        val stu = sc.parallelize(List(
            Student("郭雪磊", 18, "山东"),
            Student("单 松", 20, "山东"),
            Student("刘宇航", 18, "河北"),
            Student("王健", 18, "河南"),
            Student("许迎港", 18, "河北"),
            Student("元永劫", 18, "黑龙江"),
            Student("林博", 18, "黑龙江"),
            Student("李佳奥", 18, "河南"),
            Student("冯世明", 18, "黑龙江"),
            Student("肖楚轩", 18, "山东"),
            Student("张皓", 18, "河南"),
            Student("冯岩", 18, "黑龙江")
        ), 2)

        /*
            collect pulls the RDD's data from the executors back to the driver.
            Be careful when invoking it: a large data set can cause an
            OOM (OutOfMemory) on the driver, so prefer to filter (or take)
            before collecting.
         */
        println("----------collect----------")
        stu.collect().foreach(println)

        /*
            reduceByKey is a transformation, whereas reduce is an action:
            it aggregates every element down to a single value on the driver.
         */
        println("----------reduce----------")
        val rdd = sc.parallelize(1 to 100)
        println(rdd.reduce(_+_))

        println("----------countByKey----------")

        // countByKey returns a driver-side Map[K, Long]: occurrences per province.
        val ret = stu.map(stu => (stu.province, stu))
        ret.countByKey().foreach(println)

        println("----------saveXxx----------")
        // Output path is now configurable via args(0); the hard-coded Windows
        // path remains the default for backward compatibility.
        // NOTE(review): the path must NOT already exist — Hadoop's
        // TextOutputFormat fails with FileAlreadyExistsException on re-runs.
        val outputPath = args.headOption.getOrElse("D:/province.txt")
        ret.saveAsHadoopFile(
            outputPath,
            classOf[Text],
            classOf[Student],
            classOf[TextOutputFormat[Text, Student]]
        )

        sc.stop()
    }
}
