import org.apache.spark.{SparkConf, SparkContext}  
  
/**
 * Small Spark demo: splits a set of phrases into words, lower-cases them,
 * and prints the result from the driver.
 *
 * Runs locally on all available cores (`local[*]`).
 */
object RDDTransformations {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("RDD Transformations").setMaster("local[*]")
    val sc = new SparkContext(conf)

    // Ensure the SparkContext is always stopped, even if a job fails —
    // otherwise the local cluster resources (threads, UI port) leak.
    try {
      val rdd = sc.parallelize(Seq("Hello World", "Spark Cluster", "Data Analysis", "Distributed Computing", "Scala Programming", "Big Data", "Machine Learning", "Artificial Intelligence", "Apache Hadoop", "Apache Spark"))

      // flatMap splits each phrase into individual words; map normalizes case.
      val wordsRDD = rdd.flatMap(_.split(" ")).map(_.toLowerCase())

      // collect() brings all words to the driver — fine for this tiny demo,
      // but would not scale to large datasets.
      wordsRDD.collect().foreach(println)
    } finally {
      sc.stop()
    }
  }
}