package day2

import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkContext}

object WordCount {
  def main(args: Array[String]): Unit = {
    // Point Hadoop at a local winutils installation so Spark can run on Windows.
    System.setProperty("hadoop.home.dir", "D:\\新建文件夹\\HCIA初级\\hadoop")
    // Silence Spark's verbose INFO logging.
    Logger.getLogger("org").setLevel(Level.OFF)
    // Run locally with a single worker thread.
    val sc = new SparkContext(new SparkConf().setAppName("Basic").setMaster("local"))

    // Classic pair-RDD variant, kept for reference: map each word to (word, 1),
    // then sum the counts per key with reduceByKey.
    //    val file_wordCount = sc.textFile("D:\\data\\files\\wordCount.txt").flatMap(_.split(" "))
    //    val pair_wordCount = file_wordCount.map((_, 1))
    //    pair_wordCount.reduceByKey(_ + _).foreach(println)
    // Read the input file, split each line on spaces, and count occurrences.
    // Note: countByValue returns its result as a Map on the driver, so it is
    // only suitable when the distinct words fit in driver memory.
    sc.textFile("D:\\新建文件夹\\data\\wordCount.txt")
      .flatMap(_.split(" "))
      .countByValue()
      .foreach(println)
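
    // A minimal distributed sketch (assumes the same input path): keep the
    // counts in an RDD, sort by descending count, and bring back only the
    // ten most frequent words instead of the whole map.
    sc.textFile("D:\\新建文件夹\\data\\wordCount.txt")
      .flatMap(_.split(" "))
      .map((_, 1))
      .reduceByKey(_ + _)
      .sortBy(_._2, ascending = false)
      .take(10)
      .foreach(println)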

    // Release Spark resources when the job finishes.
    sc.stop()
  }
}