package com.lhc.rdddemo

import org.apache.spark.sql.SparkSession

import scala.collection.mutable.ListBuffer

object ForeacheAndForeachPartitionApp {

  /**
   * Demo contrasting Spark transformations (lazy) with actions (eager).
   *
   * Builds an RDD of 100 student strings and prints them with `foreach`
   * (an action — it launches a job). Optionally runs a classic word count
   * when an input path is supplied.
   *
   * @param args optional: args(0) is the path of a text file for the
   *             word-count example; when absent the example is skipped.
   */
  def main(args: Array[String]): Unit = {
    val sparkSession = SparkSession.builder()
      .appName("ForeacheAndForeachPartitionApp")
      .master("local[2]")
      .getOrCreate()
    val sparkContext = sparkSession.sparkContext

    try {
      // Build "stu:1".."stu:100" immutably instead of mutating a ListBuffer.
      val students = (1 to 100).map(i => s"stu:$i")
      val stuRDD = sparkContext.parallelize(students)

      // foreach is an ACTION: it launches a job. println runs on the
      // executors, so in cluster mode the output lands in executor logs,
      // not the driver console (fine here with master = local[2]).
      stuRDD.foreach(println)

      // Word count only runs when a non-empty input path is supplied.
      // The original passed "" to textFile, which would have failed at
      // job time with an invalid-path error — and never attached an
      // action, so the transformations were dead code anyway.
      args.headOption.filter(_.nonEmpty).foreach { path =>
        val counts = sparkContext.textFile(path)
          .flatMap(_.split(" "))
          .map(word => (word, 1))
          .reduceByKey(_ + _) // transformation: stays lazy until an action fires
        counts.collect().foreach(println) // collect is the action that triggers the job
      }
    } finally {
      // Always release the local Spark context, even if a job fails.
      sparkSession.stop()
    }
  }
}
