package com.atguigu.sparkcore.rdd.action

import com.atguigu.sparkcore.util.MySparkContextUtil
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

import scala.collection.immutable.Nil

/**
  * 聚合
  * author 剧情再美终是戏
  * mail 13286520398@163.com
  * date 2020/1/7 16:00 
  * version 1.0
  **/
object Reduce {

  /**
    * Demonstrates the RDD `reduce` action: aggregates every element of the
    * RDD into a single value by repeatedly applying a binary function.
    * For the pairs below this concatenates the keys and sums the counts,
    * printing a single (String, Int) tuple such as (abc,6).
    */
  def main(args: Array[String]): Unit = {

    // Obtain the SparkContext from the shared utility.
    val sc = MySparkContextUtil.get(args)

    // Create the source RDD of (word, count) pairs in a single partition
    // (single partition keeps the reduce order deterministic for the demo).
    val list = List(("a", 1), ("b", 2), ("c", 3))
    val rdd = sc.makeRDD(list, 1)

    // Aggregate: concatenate the keys, sum the counts.
    // NOTE: RDD.reduce throws UnsupportedOperationException on an empty RDD;
    // the input here is a fixed non-empty list, so that case cannot occur.
    val result = rdd.reduce {
      case ((w1, n1), (w2, n2)) => (w1 + w2, n1 + n2)
    }

    // Print the aggregated result.
    println(result)

    // Release Spark resources.
    MySparkContextUtil.close(sc)
  }

}
