package com.xf.day05
import org.apache.spark.{SparkConf, SparkContext}

object DebugAggregate2 {

  /**
   * Demonstrates how `RDD.aggregate`-style computation decomposes into a
   * per-partition sequence step plus a driver-side combine step, with
   * logging and accumulators so each individual op invocation is visible.
   */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setAppName("debug").setMaster("local[1]")
    val ctx = new SparkContext(sparkConf)

    val rdd16 = ctx.parallelize(1 to 4)

    // Report how many partitions Spark actually created.
    println(s"实际分区数: ${rdd16.getNumPartitions}")

    println("=== 分区数据分布 ===")
    // Dump each partition's contents; count() forces the lazy RDD to evaluate.
    rdd16.mapPartitionsWithIndex { (partitionIndex, elems) =>
      val partitionData = elems.toList
      println(s"分区 $partitionIndex: ${partitionData.mkString("[", ", ", "]")}")
      partitionData.iterator
    }.count()

    // Accumulators counting how many times each op runs across all tasks.
    val seqCountAcc = ctx.longAccumulator("seqOpCount")
    val combCountAcc = ctx.longAccumulator("combOpCount")

    // Simulate aggregate's seqOp inside mapPartitionsWithIndex so each log
    // line can be attributed to the partition it ran in.
    val perPartition = rdd16.mapPartitionsWithIndex { (partitionIndex, elems) =>
      var seqCount = 0

      // Multiplying seqOp, instrumented with a per-partition call counter.
      val trackedSeqOp = (x1: Int, x2: Int) => {
        seqCount += 1
        seqCountAcc.add(1)
        println(s"分区$partitionIndex - seqOp#$seqCount: $x1 * $x2 = ${x1 * x2}")
        x1 * x2
      }

      // Fold each partition starting from the zero value 3, just like
      // aggregate seeds every partition's seqOp with the zero value.
      val partitionResult = elems.foldLeft(3)(trackedSeqOp)
      println(s"分区$partitionIndex 的中间结果: $partitionResult")
      Iterator.single(partitionResult)
    }.collect() // bring the per-partition results back to the driver

    // Driver-side combOp: adds the partial results, again seeded with the
    // zero value — matching aggregate's use of zeroValue in the combine step.
    var combCount = 0
    val trackedCombOp = (x3: Int, x4: Int) => {
      combCount += 1
      combCountAcc.add(1)
      println(s"combOp#$combCount (合并操作): $x3 + $x4 = ${x3 + x4}")
      x3 + x4
    }

    val finalResult = perPartition.foldLeft(3)(trackedCombOp)

    println(s"最终结果: $finalResult")
    println(s"序列操作次数: ${seqCountAcc.value}, 合并操作次数: ${combCountAcc.value}")

    ctx.stop()
  }
}