import org.apache.spark.{SparkConf, SparkContext}
object ReduceBYKey {
  /**
   * Demo driver: builds a small pair RDD and sums the values per key with
   * `reduceByKey`, printing each resulting (key, total) pair to stdout.
   *
   * Expected output (order not guaranteed): (a,3), (b,1), (c,2).
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    // Initialize the Spark configuration; local[*] runs with all local cores.
    val conf = new SparkConf()
      .setAppName("ReduceByKey") // was "ParallelizeRDD" — stale copy-paste name shown in the Spark UI
      .setMaster("local[*]")
    val sc = new SparkContext(conf)

    val pairs = sc.parallelize(List(('a', 1), ('a', 2), ('b', 1), ('c', 1), ('c', 1)))
    // Sum values per key; `_ + _` is the idiomatic shorthand for (a, b) => a + b.
    val summedByKey = pairs.reduceByKey(_ + _)
    // collect() pulls all results to the driver — fine for this tiny demo dataset.
    summedByKey.collect().foreach(println)

    // Release the SparkContext and its resources before exiting.
    sc.stop()
  }
}
