package com.xf.day05

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates how `RDD.fold` incorporates its zero value once per partition
 * AND once more when merging partition results on the driver.
 */
object TestFold {
  def main(args: Array[String]): Unit = {
    // Configure a local 4-core Spark application.
    val conf = new SparkConf()
      .setAppName("WordCount")
      .setMaster("local[4]")
      .set("spark.ui.port", "8080")
      .set("spark.driver.host", "127.0.0.1")

    // Create the SparkContext.
    val sc = new SparkContext(conf)

    try {
      // Nine integers, distributed across the default partition count (4 here,
      // from local[4]).
      val rddInt: RDD[Int] = sc.parallelize(List(1, 2, 3, 4, 5, 6, 2, 5, 1))

      // Show the actual partition count.
      println(s"实际分区数: ${rddInt.getNumPartitions}")

      println("=== 分区数据分布 ===")
      rddInt.mapPartitionsWithIndex { (partitionIndex, iterator) =>
        val partitionData = iterator.toList
        // NOTE: this println runs on the executor; it is only visible on the
        // console because we run in local mode.
        println(s"分区 $partitionIndex: ${partitionData.mkString("[", ", ", "]")}")
        partitionData.iterator
      }.count() // count() forces evaluation of the lazy transformation

      // fold applies the zero value (4) inside EACH partition and once more
      // when combining the partial results on the driver:
      //   sum(elements) + zero * (numPartitions + 1) = 29 + 4 * (4 + 1) = 49
      val foldInt: Int = rddInt.fold(4)(_ + _)
      println(foldInt) // 49 with 4 partitions (NOT simply sum + 4)
    } finally {
      // Always release the SparkContext (and its UI port) on exit.
      sc.stop()
    }
  }
}
