import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

object TransformWordsRDDExample {

  /**
   * Entry point: builds an RDD of 10 space-separated sentences, splits each
   * sentence into words, lowercases every word, and prints the result.
   *
   * Runs with a local master (`local[*]` = all available cores), so it can be
   * executed standalone without a cluster.
   */
  def main(args: Array[String]): Unit = {
    // Create the Spark configuration and context.
    val conf = new SparkConf().setAppName("TransformWordsRDD").setMaster("local[*]")
    val sc = new SparkContext(conf)

    // Ensure the SparkContext is stopped even if the job throws,
    // so we do not leak the context and its resources.
    try {
      // RDD of 10 strings; each string holds several words separated by spaces.
      val initialRdd = sc.parallelize(Seq(
        "Hello World This Is A Test",
        "Spark Programming Is Fun",
        "Apache Spark Is Distributed",
        "RDDs Are Resilient",
        "Big Data Processing",
        "Data Science With Scala",
        "Machine Learning Algorithms",
        "Cluster Computing Paradigm",
        "Spark SQL For Analytics",
        "GraphX For Graph Processing"
      ))

      // Split every sentence on spaces and convert each word to lowercase.
      val transformedWordsRdd = initialRdd
        .flatMap(_.split(" "))
        .map(_.toLowerCase)

      // Collect to the driver before printing: `rdd.foreach(println)` runs the
      // closure on the executors, so in any non-local deployment the output
      // would appear in executor logs, not on the driver's stdout. The data set
      // here is tiny, so collect() is safe.
      transformedWordsRdd.collect().foreach(println)
    } finally {
      // Always release the SparkContext.
      sc.stop()
    }
  }
}