package com.atbeijing.bigdata.spark.core.wc

import org.apache.spark.{SparkConf, SparkContext}

object Spark02_WordCount {

    def main(args: Array[String]): Unit = {

        // Word count implemented via groupBy + a per-group reduction
        // (rather than reduceByKey), to illustrate the grouping approach.
        val sparkConf = new SparkConf().setMaster("local").setAppName("WordCount")
        val context = new SparkContext(sparkConf)

        // Read lines, split on spaces into words, and pair each word with 1.
        val tokenPairs = context
            .textFile("data/word.txt")
            .flatMap(_.split(" "))
            .map((_, 1))

        // Group the (word, 1) pairs by word, then collapse each group by
        // reducing its tuples: the reduced tuple is already (word, total),
        // so it can be emitted directly as the result record.
        val counted = tokenPairs
            .groupBy { case (word, _) => word }
            .map { case (_, pairs) =>
                pairs.reduce { case ((word, left), (_, right)) => (word, left + right) }
            }

        // Bring the results back to the driver and print each (word, count).
        counted.collect().foreach(println)

        // Release the Spark connection.
        context.stop()

    }
}
