package io.a.e

import org.apache.spark.{SparkConf, SparkContext}
  
/**
 * Minimal Spark example: splits a handful of sentences into words with
 * `flatMap`, then prints the words in lower case and in upper case.
 *
 * Fixes over the previous version:
 *  - the word-splitting `flatMap` is computed once and cached instead of
 *    being re-evaluated by both case transformations;
 *  - `sc.stop()` is guaranteed via try/finally even if an action fails;
 *  - corrected a comment that claimed 10 elements (the Seq has 6).
 */
object RDDFlatMap {
  def main(args: Array[String]): Unit = {
    // Run locally, using all available cores.
    val sparkConf = new SparkConf().setAppName("RDDFlatMap").setMaster("local[*]")
    val sc = new SparkContext(sparkConf)

    try {
      // Create an RDD of 6 sentence strings.
      val sentencesRDD = sc.parallelize(Seq(
        "Hello world", "This is a test", "Spark is great",
        "Processing RDDs", "Word counting", "Example code"
      ))

      // Split each sentence on spaces exactly once; cache so both case
      // transformations below reuse the result instead of re-splitting.
      val wordsRDD = sentencesRDD.flatMap(_.split(" ")).cache()

      val lowerCaseWordsRDD = wordsRDD.map(_.toLowerCase)
      val upperCaseWordsRDD = wordsRDD.map(_.toUpperCase)

      // Collect to the driver and print (safe: the data set is tiny).
      lowerCaseWordsRDD.collect().foreach(println)
      upperCaseWordsRDD.collect().foreach(println)
    } finally {
      sc.stop() // always release the SparkContext, even on failure
    }
  }
}