package com.bigdata.spark.core.rdd.operator.action

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @author : ranzlupup
 * @date : 2023/3/1 21:30
 */
/**
 * Demonstrates the common RDD *actions* (operators that trigger job execution
 * and return a value to the driver, unlike lazy transformations).
 */
object RDD_Action_ {
    def main(args: Array[String]): Unit = {
        // Local-mode Spark context using all available cores.
        val sparkConf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("spark")
        val sc: SparkContext = new SparkContext(sparkConf)

        val rdd: RDD[Int] = sc.makeRDD(List(1, 2, 3, 4, 5, 6))
        // Same data, explicitly split into 2 partitions (used to show per-partition behavior).
        val rdd3: RDD[Int] = sc.makeRDD(List(1, 2, 3, 4, 5, 6), 2)
        // Pair RDD with duplicate keys/values for countByKey / countByValue.
        val rdd2: RDD[(Int, String)] = sc.makeRDD(List((1, "a"), (1, "a"), (1, "a"), (2, "b"), (3, "c"), (3, "c")))

        // reduce: folds all elements with the binary op -> 21
        println(rdd.reduce(_ + _))

        // collect: pulls every element back to the driver as an Array
        println(rdd.collect().mkString(", "))

        // count: number of elements -> 6
        println(rdd.count())

        // first: the first element -> 1
        println(rdd.first())

        // take(n): first n elements -> 1, 2, 3
        println(rdd.take(3).mkString(", "))

        // takeOrdered(n)(ordering): smallest n under the given ordering;
        // with Ordering.Int.reverse this yields the 4 largest -> 6, 5, 4, 3
        println(rdd.takeOrdered(4)(Ordering.Int.reverse).mkString(", "))

        // aggregate(zero)(seqOp, combOp): zero is applied per partition AND
        // again when combining partition results (unlike aggregateByKey).
        println(rdd.aggregate(0)(_ + _, _ + _))
        // fold: aggregate where seqOp == combOp
        println(rdd.fold(0)(_ + _))

        println("===================================")
        // countByKey: Map(key -> occurrence count), e.g. 1 -> 3, 2 -> 1, 3 -> 2
        println(rdd2.countByKey())
        // countByValue: counts whole elements (the (k, v) pairs) as values
        println(rdd2.countByValue())

        println("===================================")
        // collect().foreach runs on the DRIVER, so output order follows partition order.
        // FIX: foreach returns Unit — the original wrapped it in println, which
        // printed a spurious "()" after the elements.
        rdd3.collect().foreach(println)
        println("===================================")
        // rdd.foreach runs on the EXECUTORS, so output order is nondeterministic.
        // FIX: same spurious println-of-Unit removed here.
        rdd3.foreach(println)

        // Trailing blank line in the console output (kept from the original).
        println()
        sc.stop()
    }
}
