package cn.doitedu.day05

import cn.doitedu.day01.utils.SparkUtil
import org.apache.spark.rdd.RDD

/**
 * @Date 22.4.3
 * @Created by HANGGE
 * @Description
 */
object A05_行动算子_CountByKey {

  /**
   * Demonstrates the `countByKey` action: it aggregates the number of
   * occurrences of each key on its own and returns the result to the
   * Driver as a local `Map`, so no prior `reduceByKey` stage is needed.
   */
  def main(args: Array[String]): Unit = {
    val sc = SparkUtil.getSc
    // Tokenize each line on whitespace and pair every word with the count 1.
    val wordAndOne = sc.textFile("data/a.txt").flatMap(_.split("\\s+")).map((_, 1))
    // Action operator: counts occurrences per key and returns a local Map
    // to the Driver. (The original also built a `reduceByKey` RDD here, but
    // it was never used — removed as dead code; `countByKey` aggregates itself.)
    val mp: collection.Map[String, Long] = wordAndOne.countByKey()
    mp.foreach(println)
    // Release the SparkContext so the application shuts down cleanly.
    sc.stop()
  }

}
