package rddSummary.transition.key_value_type

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates `reduceByKey`: values sharing the same key are combined
 * pairwise with the supplied associative function.
 */
object test_reduceByKey {
  def main(args: Array[String]): Unit = {

    // Single-threaded local Spark context, sufficient for this demo.
    val sparkConf = new SparkConf().setAppName("test").setMaster("local")
    val sc = new SparkContext(sparkConf)

    // Pair RDD with duplicate keys "a" and "b" so the reduction has work to do.
    val pairs = sc.makeRDD(List(("a", 1), ("b", 2), ("c", 3), ("a", 1), ("b", 2)))

    // Sum the values per key, keeping the input RDD's partitioning.
    val summedByKey = pairs.reduceByKey(_ + _)
    // Same aggregation, but explicitly requesting 2 result partitions.
    val summedTwoPartitions = pairs.reduceByKey(_ + _, 2)

    summedByKey.collect().foreach(println)
    summedTwoPartitions.collect().foreach(println)

    sc.stop()
  }
}
