package com.imooc.base

import org.apache.spark.{SparkConf, SparkContext}

object wordcount {

  /** Word-count driver: reads a text file, counts occurrences of each
    * whitespace-split token, and prints one "word<TAB>count" line per word.
    *
    * @param args optional; args(0) overrides the input file path.
    *             Falls back to the original hard-coded default when absent,
    *             so existing invocations keep working.
    */
  def main(args: Array[String]): Unit = {
    // local[2]: run in-process with two worker threads, no cluster needed.
    val conf = new SparkConf().setMaster("local[2]").setAppName("wordcount")
    val sc = new SparkContext(conf)

    try {
      // Generalized: input path may now be passed as the first CLI argument.
      val inputPath = args.headOption.getOrElse(
        "E:\\pyProject\\mapreduce\\venv\\data\\The_Man_of_Property.txt")

      val countLines = sc.textFile(inputPath)
        .flatMap(_.split(" "))
        .map(word => (word, 1))
        .reduceByKey(_ + _)
        .map { case (word, count) => s"$word\t$count" }

      // Bug fix: the original wrapped this in println(...), but foreach
      // returns Unit, so a stray "()" was printed after the results.
      countLines.foreach(println)
    } finally {
      // Bug fix: always release the SparkContext, even if the job fails;
      // the original never called stop().
      sc.stop()
    }
  }
}
