package day2

import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkContext}

object wordCount {

  /**
    * Entry point: runs a classic Spark word count over a local text file.
    *
    * Reads the file, splits each line on single spaces, then prints the
    * per-word frequency twice — once via `reduceByKey` (distributed) and
    * once via `countByValue` (collected to the driver as a Map).
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    // Windows-only workaround: tell Hadoop where to find winutils.exe.
    System.setProperty("hadoop.home.dir", "D:\\hadoop")

    // Silence Spark/Hadoop INFO logging *before* the context is created,
    // so the noisy startup messages are suppressed too (the original set
    // this after construction, which let all startup logs through).
    Logger.getLogger("org").setLevel(Level.OFF)

    val sc = new SparkContext(new SparkConf().setAppName("Basic").setMaster("local"))
    try {
      // NOTE(review): "wordCpunt.txt" looks like a typo for "wordCount.txt",
      // but the string must match the actual file on disk — confirm before renaming.
      val words = sc
        .textFile("D:\\data\\HCIP\\files\\wordCpunt.txt")
        .flatMap(_.split(" "))

      // Distributed count: (word, 1) pairs reduced by key, printed per partition.
      words
        .map(word => (word, 1))
        .reduceByKey(_ + _)
        .foreach(println)

      // Driver-side equivalent: countByValue returns a local Map[String, Long].
      words.countByValue().foreach(println)
    } finally {
      // Always release the SparkContext, even if the job throws.
      sc.stop()
    }
  }
}
