import org.apache.spark.{SparkConf, SparkContext}

/** Minimal Spark word-count demo: counts word occurrences in a small
  * in-memory array and prints each (word, count) pair to stdout.
  */
object Demo {
  def main(args: Array[String]): Unit = {
    // Run Spark locally, using all available CPU cores.
    val conf = new SparkConf().setMaster("local[*]").setAppName("helloSpark")
    // Create the Spark context (the entry point for RDD operations).
    val sc = new SparkContext(conf)
    try {
      // NOTE(review): "helo" in the sample data looks like a typo, but it is
      // runtime data, so it is preserved as-is.
      val arr = Array("Hello Scala", "hello Spark", "helo java", "hello hadoop")

      // Classic word count: split each line on spaces, pair every word with 1,
      // then sum the 1s per distinct word and pull the result to the driver.
      val wordCounts = sc
        .parallelize(arr)
        .flatMap(_.split(" "))
        .map((_, 1))
        .reduceByKey(_ + _)
        .collect()

      wordCounts.foreach(println)
      println("----------")
    } finally {
      // Fix: the original never stopped the SparkContext, leaking the local
      // cluster's resources (threads, UI port) on every run.
      sc.stop()
    }
  }
}

