package SparkRDD.RDD算子.Transformations.聚合操作

import org.apache.spark.{SparkConf, SparkContext}
import org.junit.Test


/*
  * aggregateByKey also aggregates values by key, but in two phases:
  *  zeroValue   the initial accumulator value, seeded once per key *per partition*
  *  seqOp       folds each value into the per-partition accumulator —
  *              note it receives the running accumulator, it is NOT a
  *              simple per-value transform
  *  combOp      merges the per-partition accumulators for the same key
  *
  * aggregateByKey suits "transform first, then aggregate" workloads, but the
  * transform must be expressed so the result does not depend on partitioning.
 */
class aggregateByKeyTest {

  // Local Spark context for this test class; local[6] = 6 worker threads.
  val conf = new SparkConf().setMaster("local[6]").setAppName("sample")
  val sc   = new SparkContext(conf)

  /**
   * Demonstrates `aggregateByKey`: apply a 20%-off discount (x0.8) to each
   * price, then sum the discounted prices per key.
   *
   * Expected output: (手机, 20.0) and (电脑, 16.0).
   */
  @Test
  def aggregateByKeyTest(): Unit = {

    val rdd = sc.parallelize(Seq(("手机", 10.0), ("手机", 15.0), ("电脑", 20.0)))

    // BUG FIX: the original passed zeroValue = 0.8 with seqOp
    // (acc, item) => item * acc. That only yields the intended "discount each
    // value then sum" result when every element lands in its own partition.
    // If two values of the same key share a partition, the second value is
    // multiplied by the running PARTIAL SUM, not by 0.8
    // (手机: 0.8 -> 10*0.8 = 8.0 -> 15*8.0 = 120.0, instead of 8 + 12 = 20).
    //
    // The fix makes the discount an explicit constant and uses a true
    // identity (0.0) as zeroValue, so the result is partition-independent.
    val discount = 0.8
    rdd
      .aggregateByKey(zeroValue = 0.0)(
        (acc, price) => acc + price * discount, // seqOp: discount each value, accumulate within a partition
        (a, b) => a + b                         // combOp: merge per-partition partial sums
      )
      .collect()
      .foreach(println(_))

    // Release the local SparkContext — Spark allows only one active context
    // per JVM, and JUnit instantiates this class (and thus a new context)
    // for each test run.
    sc.stop()
  }
}
