package chapter03
import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkContext}
/**
 * Demonstrates `aggregateByKey` vs `foldByKey` on a pair RDD.
 *
 * `aggregateByKey(zero)(seqOp, combOp)` takes a zero value plus two functions:
 * `seqOp` merges values within a partition, `combOp` merges the per-partition
 * results across partitions. `foldByKey` is the special case where both
 * functions are the same.
 */
object Test24_aggByKey {
  def main(args: Array[String]): Unit = {
    // Silence Spark's INFO chatter so the demo output is readable.
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
    val conf = new SparkConf().setMaster("local[*]").setAppName("aggByKey")
    val sc = new SparkContext(conf)
    // Pair RDD spread over 2 partitions so the intra-/inter-partition
    // distinction in aggregateByKey is actually exercised.
    val value = sc.makeRDD(List(("a", 1), ("a", 2), ("c", 3),
      ("b", 4), ("c", 5), ("c", 6)
    ), 2)
    // Show how the data landed in each partition.
    value.foreachPartition(part => println(part.toList))

    // Per key: take the max within each partition, then SUM those maxima
    // across partitions. Zero value 0 is the identity for max over these
    // non-negative values.
    val value1 = value.aggregateByKey(0)(
      (x, y) => math.max(x, y),
      (n, m) => n + m
    )
    println(value1.collect().toList)

    // When the intra- and inter-partition functions are identical,
    // foldByKey is the simpler equivalent.
    val value2 = value.foldByKey(0)(_ + _)
    println(value2.collect().toList)

    // Release the SparkContext; the original leaked it.
    sc.stop()
  }
}
