import org.apache.spark.sql.SparkSession
/**
 * Demonstrates RDD `distinct()` in Spark local mode: builds a small
 * key/value RDD, removes duplicate elements, and prints the results.
 */
object distinck {
  def main(args: Array[String]): Unit = {
    // Local mode, use all available cores
    val spark = SparkSession.builder
      .appName(name = "LocalModeDemo") // fixed typo: was "LocaLHodeDemo"
      .master(master = "local[*]")
      .getOrCreate()
    // Obtain the SparkContext from the session
    val sc = spark.sparkContext
    try {
      val rdd = sc.parallelize(List(('a', 1), ('a', 1), ('b', 1), ('c', 1)))

      // Run the distinct transformation ONCE and collect to the driver.
      // (The original code triggered the same shuffle three times and
      // discarded two of the results.)
      val distinctResult = rdd.distinct().collect()

      // Print summary statistics
      println("================ 去重结果 ================")
      println(s"原始数据量: ${rdd.count()} 条")
      println(s"去重后数量: ${distinctResult.length} 条\n")

      // Print each distinct (key, value) pair
      println("【去重明细】")
      distinctResult.foreach { case (key, value) =>
        println(s"键: $key\t 值: $value")
      }
    } finally {
      // Always release the SparkSession, even if the job fails
      spark.stop()
    }
  }
}