package com.yanggu.spark.core.rdd.transform.keyvalue

import org.apache.spark.{SparkConf, SparkContext}

// Key-Value type RDD - GroupByKey operator example
object RDD18_GroupByKey {

  /**
   * Demonstrates the `groupByKey` transformation on a pair RDD:
   * groups all values sharing the same key into an `Iterable`,
   * then sums each group and prints the per-key total.
   *
   * NOTE(review): for a plain aggregation like this, `reduceByKey(_ + _)`
   * is preferred in real code — it combines values map-side before the
   * shuffle, while `groupByKey` ships every value across the network.
   * `groupByKey` is kept here because it is what this example teaches.
   */
  def main(args: Array[String]): Unit = {

    // 1. Build the Spark configuration (local mode, all cores).
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("spark")

    // 2. Create the Spark context from that configuration.
    val sparkContext = new SparkContext(sparkConf)

    // 3. Create an RDD from an in-memory collection of (key, value) pairs.
    val rdd = sparkContext.makeRDD(Array(("a", 1), ("b", 2), ("c", 1), ("a", 2)))

    // 4. groupByKey: shuffle so that all values with the same key
    //    end up together as one (key, Iterable[Int]) pair.
    val value = rdd.groupByKey()

    // 5. Sum each group and print the result.
    //    `Iterable[Int].sum` works directly — no need to copy into a List first.
    value.foreach {
      case (key, values) =>
        val sum = values.sum
        println(s"key: $key, sum: $sum")
    }

    // 6. Release resources.
    sparkContext.stop()
  }

}
