package org.apache.spark.examples

import org.apache.spark.{SparkConf, SparkContext}

object RDDFlatMapOperations {

  /**
   * Entry point. Builds a local SparkContext, runs the flatMap demonstrations,
   * then pauses for five minutes so the Spark UI (http://localhost:4040) can be
   * inspected before shutting down.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("RDD FlatMap Operations").setMaster("local[1]")
    val sc = new SparkContext(conf)
    try {
      runExamples(sc)
      // Pause so the Spark UI can be inspected while the context is still alive.
      Thread.sleep(300000)
    } finally {
      // Always release the SparkContext, even if an example throws or the
      // sleep above is interrupted — otherwise the context (and its UI port,
      // threads, and temp dirs) would leak.
      sc.stop()
    }
  }

  /** Runs ten small demonstrations of RDD.flatMap, printing each result. */
  private def runExamples(sc: SparkContext): Unit = {
    // 1. Basic flatMap: expand each number into its divisors.
    val numbersRDD = sc.parallelize(2 to 10)
    println("\n=== 原始数据 ===")
    numbersRDD.collect().foreach(x => print(s"$x "))
    println()

    // All positive divisors of n (1 and n included). O(n) trial division is
    // fine for these tiny demo inputs.
    def factors(n: Int): Seq[Int] = (1 to n).filter(n % _ == 0)

    val factorsRDD = numbersRDD.flatMap(factors)
    println("\n=== 展开为因子 ===")
    factorsRDD.collect().foreach(x => print(s"$x "))
    println()

    // 2. Sequence expansion: each number n becomes the sequence 1..n.
    val sequenceRDD = numbersRDD.flatMap(x => 1 to x)
    println("\n=== 展开为序列 ===")
    sequenceRDD.collect().foreach(x => print(s"$x "))
    println()

    // 3. String handling: render each number as a string and flatten it into
    //    its characters (a String is an IterableOnce[Char], so flatMap works).
    val digitsRDD = numbersRDD.flatMap(_.toString)
    println("\n=== 展开为字符 ===")
    digitsRDD.collect().foreach(x => print(s"$x "))
    println()

    // 4. Option handling: flatMap drops None results, keeping only the
    //    successfully computed values. Half of n for even n, None otherwise.
    def safeDivide(n: Int): Option[Int] =
      if (n % 2 == 0) Some(n / 2) else None

    val optionRDD = numbersRDD.flatMap(safeDivide)
    println("\n=== Option展开(偶数除2) ===")
    optionRDD.collect().foreach(x => print(s"$x "))
    println()

    // 5. Nested-sequence expansion: build a multiplication table, one row of
    //    products "j x i" per input number i.
    val multiplicationTableRDD = numbersRDD.flatMap(i =>
      (1 to i).map(j => s"$j x $i = ${j * i}")
    )
    println("\n=== 九九乘法表 ===")
    multiplicationTableRDD.collect().foreach(println)

    // 6. Combination generation: all ordered pairs of the input numbers.
    //    The collected array is small and is captured by the closure; for
    //    large data, RDD.cartesian would be the scalable alternative.
    val numbers = numbersRDD.collect()
    val combinationsRDD = numbersRDD.flatMap(x =>
      numbers.map(y => (x, y))
    )
    println("\n=== 数字组合(前10个) ===")
    combinationsRDD.take(10).foreach(println)

    // 7. Range expansion: even numbers expand to [n-1, n, n+1]; odd numbers
    //    are dropped entirely (empty output from flatMap).
    val rangeRDD = numbersRDD.flatMap(x =>
      if (x % 2 == 0) x - 1 to x + 1 else Seq.empty[Int]
    )
    println("\n=== 偶数区间展开 ===")
    rangeRDD.collect().foreach(x => print(s"$x "))
    println()

    // 8. Conditional expansion: multiples of 3 yield three elements, other
    //    evens yield two, everything else passes through unchanged.
    val conditionalRDD = numbersRDD.flatMap(x =>
      if (x % 3 == 0) Seq(x, x * 2, x * 3)
      else if (x % 2 == 0) Seq(x, x * 2)
      else Seq(x)
    )
    println("\n=== 条件序列展开 ===")
    conditionalRDD.collect().foreach(x => print(s"$x "))
    println()

    // 9. Flattening nested collections: tag each inner element with its
    //    group key, producing "key-element" strings.
    val nestedData = sc.parallelize(Seq(
      (1, List(1, 2, 3)),
      (2, List(4, 5, 6)),
      (3, List(7, 8, 9))
    ))
    val flattenedRDD = nestedData.flatMap { case (n, list) =>
      list.map(x => s"$n-$x")
    }
    println("\n=== 嵌套集合展开 ===")
    flattenedRDD.collect().foreach(println)

    // 10. Tokenization: split sentences on spaces and lowercase every word —
    //     the classic first step of a word count.
    val sentences = sc.parallelize(Seq(
      "Hello World",
      "Apache Spark is awesome",
      "Big Data Processing"
    ))
    val wordsRDD = sentences.flatMap(_.split(" ").map(_.toLowerCase))
    println("\n=== 句子分词 ===")
    wordsRDD.collect().foreach(x => print(s"$x "))
    println()
  }
}