import org.apache.spark.{SparkConf, SparkContext}

/**
 * Minimal Spark driver demonstrating `reduceByKey`.
 *
 * Builds a small pair RDD of (Char, Int) tuples, sums the values per key,
 * collects the result to the driver, and prints each (key, total) pair.
 */
object scala4233 {
  def main(args: Array[String]): Unit = {
    // 1. Create the SparkConf and SparkContext (local mode, all cores).
    val conf = new SparkConf().setAppName("ReduceByKeyExample").setMaster("local[*]")
    val sc = new SparkContext(conf)

    // Ensure the SparkContext is always stopped, even if the job throws —
    // otherwise the local Spark runtime (threads, UI port) would leak.
    try {
      // 2. Create the pair RDD: keys 'a', 'b', 'c' with integer values.
      val rdd_1 = sc.parallelize(
        List(('a', 1), ('a', 2), ('b', 1), ('c', 1), ('c', 1), ('c', 2))
      )
      // 3. Sum the values for each key.
      //    Expected totals: ('a', 3), ('b', 1), ('c', 4).
      val re_rdd_1 = rdd_1.reduceByKey(_ + _)
      // 4. Collect to the driver and print each (key, total) pair.
      val result = re_rdd_1.collect()
      result.foreach(println)
    } finally {
      // 5. Release the SparkContext.
      sc.stop()
    }
  }
}