package wc

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

object Spark_WordCount2 {

  /**
   * Counts word occurrences in a text file using Spark RDDs and prints
   * each (word, count) pair to stdout.
   *
   * @param args optional first argument: path to the input text file.
   *             Defaults to "data/test.txt" for backward compatibility.
   */
  def main(args: Array[String]): Unit = {
    // Allow the input path to be supplied on the command line instead of
    // being hard-coded; absent args behaves exactly as before.
    val inputPath: String = args.headOption.getOrElse("data/test.txt")

    val sparkConf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("wordCount")
    val sc = new SparkContext(sparkConf)

    try {
      val lines: RDD[String] = sc.textFile(inputPath)

      // Split each line on single spaces into individual words.
      // (Renamed from `word` to avoid shadowing the map parameter below.)
      val words: RDD[String] = lines.flatMap(_.split(" "))

      // Pair every word with an initial count of 1.
      val wordOne: RDD[(String, Int)] = words.map(w => (w, 1))

      // reduceByKey performs map-side combining, so it is preferred over
      // the groupBy + reduce approach for aggregating counts.
      val wordCount: RDD[(String, Int)] = wordOne.reduceByKey(_ + _)

      // collect() brings the (small) result to the driver so the output is
      // printed here; a bare RDD.foreach(println) would print on executor
      // stdout and produce nothing on the driver outside local mode.
      wordCount.collect().foreach(println)
    } finally {
      // Always release the SparkContext, even if a stage fails.
      sc.stop()
    }
  }

}
