package com.zhang.spark_2.com.zhang.core.transform

import org.apache.spark.{SparkConf, SparkContext}

/**
 * @title:
 * @author: zhang
 * @date: 2022/2/12 16:29 
 */
/**
 * Demonstrates `combineByKey` by computing the (truncated) integer average
 * of values per key.
 *
 * The three functions passed to `combineByKey`:
 *   1. createCombiner: first value for a key within a partition -> (sum, count)
 *   2. mergeValue:     fold a further value into the partition-local (sum, count)
 *   3. mergeCombiners: combine (sum, count) pairs across partitions
 */
object Spark08_combineByKey {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("map")
    val sc = new SparkContext(conf)

    sc.makeRDD(
      List(
        ("a", 1), ("a", 2), ("b", 3),
        ("b", 4), ("b", 5), ("a", 6),
      ),
      2 // two partitions, so mergeCombiners is actually exercised
    ).combineByKey(
      // createCombiner: turn the first value into an accumulator (sum, count)
      (_, 1),
      // mergeValue: add a value within a partition; type annotation is required
      // because Spark cannot infer the combiner type here
      (t: (Int, Int), v) => {
        (t._1 + v, t._2 + 1)
      },
      // mergeCombiners: merge per-partition accumulators
      (t1: (Int, Int), t2: (Int, Int)) => {
        (t1._1 + t2._1, t1._2 + t2._2)
      }
    ).map {
      case (s, (sum, count)) =>
        // NOTE: integer division — the average is truncated (e.g. 9/3 = 3, 12/3 = 4)
        (s, sum / count)
    }.collect().foreach(println)

    // Release the SparkContext; the original omitted this, leaving the
    // context (and its local executor threads) running until JVM exit.
    sc.stop()
  }
}
