package com.doit.spark.day03

import com.doit.spark.day01.utils.SparkUtil
import org.apache.spark.rdd.RDD

/**
 * @DATE 2022/1/5/22:54
 * @Author MDK
 * @Version 2021.2.2
 * */
/**
 * Classic RDD word count: read a text file, split lines on whitespace,
 * and print each distinct word with its occurrence count.
 *
 * Uses `reduceByKey` instead of `groupByKey` + manual counting:
 * `reduceByKey` combines counts on the map side before the shuffle,
 * so only one partial sum per word per partition crosses the network,
 * whereas `groupByKey` ships every single `(word, 1)` pair and
 * materializes all of them in memory per key.
 */
object C06_WordCount {
  def main(args: Array[String]): Unit = {
    val sc = SparkUtil.getSc
    val rdd: RDD[String] = sc.textFile("data/word.txt")

    // "\\s+" splits on any run of whitespace (spaces, tabs, ...)
    val words: RDD[String] = rdd.flatMap(_.split("\\s+"))

    // Map each word to a 1 and sum per key with map-side combining.
    val counts: RDD[(String, Int)] = words.map((_, 1)).reduceByKey(_ + _)

    // NOTE: foreach runs on the executors; in local mode the tuples
    // print to this console, and output order is not deterministic.
    counts.foreach(println)

    // Release the SparkContext's resources when done.
    sc.stop()
  }
}
