package com.xf.day05
import org.apache.spark.{SparkConf, SparkContext}

// TODO: closure serialization means driver-local vars mutated inside seqOp/combOp
// are not visible on the driver in cluster mode, and combOp may run once per partition —
// track call counts with accumulators instead of captured vars.
/**
 * Debug harness that traces how `RDD.aggregate` invokes its seqOp and combOp:
 * it prints the partition layout, logs every seqOp/combOp call, and reports
 * the total call counts on the driver.
 *
 * Call-count tracking uses Spark accumulators rather than captured `var`s:
 * closures passed to RDD operations are serialized and executed on executors,
 * so mutations to a driver-local `var` are lost on a real cluster (the driver
 * would always print 0). `LongAccumulator` increments are merged back to the
 * driver after the action completes, which is the supported pattern.
 */
object DebugAggregate {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("debug").setMaster("local[1]")
    val sc = new SparkContext(conf)

    val rdd16 = sc.parallelize(1 to 4)

    // Show the effective partition count (affects how often combOp fires).
    println(s"实际分区数: ${rdd16.getNumPartitions}")

    println("=== 分区数据分布 ===")
    rdd16.mapPartitionsWithIndex { (partitionIndex, iterator) =>
      // Materialize once so we can both print and re-emit the elements.
      val partitionData = iterator.toList
      println(s"分区 $partitionIndex: ${partitionData.mkString("[", ", ", "]")}")
      partitionData.iterator
    }.count()

    // Accumulators survive closure serialization; their task-side increments
    // are merged into the driver-side value once the action finishes.
    val seqCount = sc.longAccumulator("seqOpCalls")
    val combCount = sc.longAccumulator("combOpCalls")

    // seqOp: folds one element into the per-partition accumulator (here: product).
    // NOTE: reading `.value` inside a task shows the task-local partial count,
    // which is the per-call sequence number we want for tracing.
    def trackedSeqOp(acc: Int, elem: Int): Int = {
      seqCount.add(1)
      println(s"seqOp#${seqCount.value}: $acc * $elem = ${acc * elem}")
      acc * elem
    }

    // combOp: merges two partition-level results on the driver (here: sum).
    def trackedCombOp(left: Int, right: Int): Int = {
      combCount.add(1)
      println(s"combOp#${combCount.value}: $left + $right = ${left + right}")
      left + right
    }

    // zeroValue = 3 is applied once per partition (seqOp) AND once more when
    // the driver starts combining partition results (combOp) — a classic
    // aggregate gotcha this script exists to demonstrate.
    val result = rdd16.aggregate(3)(trackedSeqOp, trackedCombOp)
    println(s"最终结果: $result")
    println(s"序列操作次数: ${seqCount.value}, 合并操作次数: ${combCount.value}")

    sc.stop()
  }
}
