package com.jscloud.spark.scalacount

import org.apache.spark.{SparkConf, SparkContext}

object ScalaWordCount {

  /** Word-count example: reads a text file, counts occurrences of each
    * whitespace-separated word, and prints the (word, count) pairs.
    *
    * @param args optional; `args(0)` overrides the input path. Defaults to the
    *             original hard-coded HDFS location for backward compatibility.
    */
  def main(args: Array[String]): Unit = {
    // SparkContext is the entry point of the program.
    // NOTE: master is hard-coded to local[*]; fine for local runs, but in
    // production the master should be supplied via spark-submit instead.
    val conf = new SparkConf()
      .setAppName("ScalaWordCount")
      .setMaster("local[*]")

    val sc = new SparkContext(conf)
    try {
      // Allow the caller to pass an input path; keep the old default.
      val inputPath = args.headOption.getOrElse("hdfs://bigdata01:8020/hello.txt")

      val counts: Array[(String, Int)] = sc
        .textFile(inputPath)
        .flatMap(_.split(" "))   // split each line into words
        .map((_, 1))             // pair each word with an initial count of 1
        .reduceByKey(_ + _)      // sum the counts per word
        .collect()               // bring results to the driver (small data only)

      counts.foreach(println)
    } finally {
      // Ensure the SparkContext is released even if the job throws.
      sc.stop()
    }
  }

}
