package com.ww.spark.wc

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Local word-count example.
 *
 * Reads `data/testdata.txt`, computes per-word occurrence counts, then a
 * second-level aggregation: for each occurrence count, how many distinct
 * words share that count. Both results are printed to stdout.
 */
object WordCountScala2 {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setAppName("wordCount")
    conf.setMaster("local") // single-threaded local mode; fine for this demo

    val sc = new SparkContext(conf)

    try {
      // Simplified version: split each line on spaces, pair every word with 1,
      // then sum the 1s per word -> (word, count).
      val fileRDD1: RDD[String] = sc.textFile("data/testdata.txt")
      val res = fileRDD1.flatMap(_.split(" ")).map((_, 1)).reduceByKey(_ + _)

      // Second aggregation: key by the count itself -> (count, numberOfWordsWithThatCount).
      val value1 = res.map(x => (x._2, 1)).reduceByKey(_ + _)

      value1.foreach(println)
      res.foreach(println)

      // NOTE(review): keeps the JVM alive so the Spark web UI (localhost:4040)
      // can be inspected while developing; remove for any real job.
      Thread.sleep(10000000)
    } finally {
      // Always release the SparkContext — the original leaked it on exit
      // and on any exception thrown by the job above.
      sc.stop()
    }
  }

}
