package com.atguigu.sparkcore.day01.singlevalue

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Author atguigu
 * Date 2020/10/27 14:43
 */
/**
 * Word count implemented via `groupBy` + `map`.
 *
 * Teaching example: `groupBy(word => word)` shuffles EVERY occurrence of a
 * word across the cluster before counting, so it is the less efficient
 * variant compared to a `reduceByKey`-based word count (no map-side combine).
 *
 * Usage: the input file may be passed as the first program argument;
 * when absent it falls back to the original hard-coded path.
 */
object WordCountGoupBy {
    def main(args: Array[String]): Unit = {
        val conf: SparkConf = new SparkConf().setAppName("WordCountGoupBy").setMaster("local[2]")
        val sc: SparkContext = new SparkContext(conf)
        // Generalized: allow the input path as args(0); default keeps the
        // original behavior for callers that run with no arguments.
        val inputPath: String = if (args.nonEmpty) args(0) else "c:/1128.txt"
        val lineRDD: RDD[String] = sc.textFile(inputPath)
        // Split each line on single spaces into individual words.
        val wordRDD: RDD[String] = lineRDD.flatMap(_.split(" "))
        // Full shuffle: (word, Iterable(word, word, ...)) — every duplicate
        // travels over the network; that is the point of this demo.
        val wordAndWords: RDD[(String, Iterable[String])] = wordRDD.groupBy(word => word)
        // Count each group. The partial-function literal is exhaustive for
        // (String, Iterable[String]) tuples, so no MatchError is possible.
        val wordCountRDD: RDD[(String, Int)] = wordAndWords.map {
            case (word, it) => (word, it.size)
        }
        wordCountRDD.collect.foreach(println)
        // NOTE(review): deliberately keeps the JVM (and the Spark web UI at
        // http://localhost:4040) alive for inspection — Thread.sleep is an
        // anti-pattern outside demos; remove for non-interactive runs.
        Thread.sleep(10000000)
        sc.stop()
    }
}
