package org.huangrui.spark.scala.core.rdd.operate.action

import org.apache.spark.{SparkConf, SparkContext}


/**
 * @Author hr
 * @Create 2024-10-18 9:25
 */
/**
 * Demo of the `countByKey` action: counts how many elements of a pair RDD
 * share each key, returning the result to the driver as a local Map.
 */
object Spark03_Operate_Action_1 {
  def main(args: Array[String]): Unit = {
    // Local Spark context using all available cores.
    val conf = new SparkConf().setMaster("local[*]").setAppName("spark")
    val sc = new SparkContext(conf)

    // Source data spread over 2 partitions; each value is then doubled.
    val rdd = sc.parallelize(List(4, 2, 3, 1), 2)
    val newRdd = rdd.map(_ * 2)

    // countByKey: tally the number of elements per key.
    /*
      8, 4, 6, 2                       (values after doubling)
      -------------------------------
      (a, 8), (a, 4), (a, 6), (a, 2)   (every value keyed with "a")
      -------------------------------
      (a, 4)                           (four elements carry key "a")
     */
    newRdd
      .map(num => ("a", num))
      .countByKey()
      .foreach(println)

    sc.stop()
  }
}
