package com.xzx.spark.core.action

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates the `countByValue` and `countByKey` actions on a pair RDD.
 * (NOTE(review): despite the object name, no save action is shown here.)
 *
 * @author xinzhixuan
 * @version 1.0
 * @date 2021-08-07 9:41 上午
 */
object Spark009_Save {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setMaster("local[*]").setAppName(getClass.getSimpleName))

    // Build the pair RDD once and reuse it for both actions instead of
    // materializing the same list twice.
    val rdd = sc.makeRDD(List((1, "a"), (1, "a"), (1, "a"), (2, "b"), (3, "c"), (3, "c")))

    // countByValue treats each whole element — here the (Int, String) tuple —
    // as the "value" and returns the occurrence count of each distinct element.
    val result: collection.Map[(Int, String), Long] = rdd.countByValue()
    println(result) // Map((3,c) -> 2, (1,a) -> 3, (2,b) -> 1)

    // countByKey counts elements per key of the pair RDD (only the Int key is
    // considered; the String value is ignored).
    val result2: collection.Map[Int, Long] = rdd.countByKey()
    println(result2) // Map(1 -> 3, 2 -> 1, 3 -> 2)

    sc.stop()
  }
}
