package com.niit.spark.rdd.test

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Date: 2025/5/9
 * Author: Ys
 * Description: Exercise — count the distinct products purchased by each user
 * using map + groupByKey on an RDD of "user,product" records.
 */
object MapGroupByKeyExercise {

  /**
   * Counts how many distinct products each user purchased.
   *
   * Pipeline: parse "user,product" lines into pairs, group products by user
   * with `groupByKey`, then deduplicate each user's products and take the size.
   * Prints one `(user, count)` line per user to stdout.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("MapGroupByKeyExercise")
    val sc = new SparkContext(sparkConf)
    sc.setLogLevel("ERROR")
    // Ensure the SparkContext is released even if a job fails mid-way.
    try {
      // Sample purchase records in "user,product" form.
      val purchaseRDD = sc.parallelize(Seq("User1,ProductA", "User2,ProductB",
        "User1,ProductC", "User3,ProductA", "User2,ProductC", "User1,ProductB"))

      // Split each record into a (user, product) key-value pair.
      val mapRdd: RDD[(String, String)] = purchaseRDD.map { line =>
        val parts: Array[String] = line.split(",")
        (parts(0), parts(1))
      }

      // NOTE: groupByKey shuffles every value across the cluster; it is kept
      // here because this exercise demonstrates groupByKey specifically, but
      // reduceByKey/aggregateByKey scales better for plain counting.
      val gpRdd: RDD[(String, Iterable[String])] = mapRdd.groupByKey()

      // Deduplicate via Set in one pass instead of `toList.distinct`, which
      // builds an intermediate List before removing duplicates.
      val resRdd: RDD[(String, Int)] = gpRdd.mapValues(_.toSet.size)

      resRdd.collect().foreach(println)
    } finally {
      sc.stop()
    }
  }

}
