package com.niit.spark.rdd.test

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Date: 2025/5/6
 * Author: Ys
 * Description: Sums sales amounts per product using Spark pair-RDD operations.
 */
object GroupByKeyExercise {

  /**
   * Entry point: computes the total sales amount per product from a small
   * in-memory (product, amount) dataset and prints one (product, total)
   * pair per line.
   *
   * Fixes over the original:
   *  - `groupByKey().mapValues(_.sum)` shuffles every individual value across
   *    the network before summing. `reduceByKey(_ + _)` yields the same result
   *    but combines values on each partition first (map-side combine), so far
   *    less data is shuffled.
   *  - `sc.stop()` is now in a `finally` block so the SparkContext is released
   *    even if an action throws.
   */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("GroupByKeyExercise")
    val sc = new SparkContext(sparkConf)
    try {
      // (product, sale amount) pairs
      val salesRDD: RDD[(String, Double)] = sc.parallelize(Seq(
        ("ProductA", 100.5), ("ProductB", 200.8),
        ("ProductA", 150.2), ("ProductC", 300.0)))

      // Total sales per product. reduceByKey is preferred over
      // groupByKey + mapValues(_.sum) for associative aggregations.
      val resRdd: RDD[(String, Double)] = salesRDD.reduceByKey(_ + _)

      resRdd.collect().foreach(println)
    } finally {
      sc.stop()
    }
  }

}
