package com.hefei.rdd

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @author caizhengjun
 * @date 2020/8/28 7:01 PM
 */
object Spark06_RDD_combineByKey {

  /**
   * Demonstrates `combineByKey` by computing the per-key average of scores.
   *
   * The pipeline aggregates each key into a (sum, count) pair, prints that
   * intermediate result, then divides to obtain the average (integer
   * division, so the average is truncated — intentional for this demo).
   */
  def main(args: Array[String]): Unit = {

    // Run locally on all cores; the app name is derived from this object's name.
    val sparkConf: SparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(this.getClass.getSimpleName)
    val sc: SparkContext = new SparkContext(sparkConf)

    val scores: List[(String, Int)] =
      List(("a", 88), ("b", 95), ("a", 91), ("b", 93), ("a", 95), ("b", 98))
    val scoreRDD: RDD[(String, Int)] = sc.makeRDD(scores)

    // createCombiner: the first value seen for a key becomes (value, 1).
    val createCombiner: Int => (Int, Int) = score => (score, 1)
    // mergeValue: fold another value (within one partition) into the running (sum, count).
    val mergeValue: ((Int, Int), Int) => (Int, Int) =
      (acc, score) => (acc._1 + score, acc._2 + 1)
    // mergeCombiners: merge per-partition (sum, count) pairs across partitions.
    val mergeCombiners: ((Int, Int), (Int, Int)) => (Int, Int) =
      (left, right) => (left._1 + right._1, left._2 + right._2)

    val sumCountRDD: RDD[(String, (Int, Int))] =
      scoreRDD.combineByKey(createCombiner, mergeValue, mergeCombiners)
    println(sumCountRDD.collect().mkString(","))

    // Integer division: the per-key average is truncated toward zero.
    val averageRDD: RDD[(String, Int)] = sumCountRDD.map {
      case (key, (sum, count)) => (key, sum / count)
    }
    println(averageRDD.collect().mkString(","))

    sc.stop()
  }
}
