package com.xzx.spark.core.wordcount

import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable

/**
 * Word-count example implemented with `RDD.aggregate` (instead of `reduce`
 * or `reduceByKey`), as an exercise in the reduce/aggregate API.
 *
 * @author xinzhixuan
 * @version 1.0
 * @date 2021-06-26 8:39 PM
 */
object Spark_WordCount_Aggregate {

  /**
   * Counts word occurrences in a text file using `RDD.aggregate`.
   *
   * Reads the input file, splits each line on single spaces, and folds the
   * words into a `mutable.Map[String, Int]` of word -> count, which is
   * printed to stdout.
   *
   * @param args optional; `args(0)` overrides the input path
   *             (defaults to "input/wc.txt" for backward compatibility)
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName(getClass.getSimpleName)
    val context = new SparkContext(conf)
    // Allow the input path to be supplied on the command line; keep the
    // original hard-coded path as the default so existing usage still works.
    val inputPath: String = args.headOption.getOrElse("input/wc.txt")
    try {
      // aggregate(zero)(seqOp, combOp):
      //   seqOp  — folds each word into a per-partition mutable map
      //   combOp — merges the per-partition maps on the driver
      // Mutating the accumulator in place is safe here: Spark serializes the
      // zero value to each task, so every partition works on its own copy.
      val counts: mutable.Map[String, Int] = context
        .textFile(inputPath)
        .flatMap(_.split(" "))
        .aggregate(mutable.HashMap[String, Int]())(
          (acc, word) => {
            acc.update(word, acc.getOrElse(word, 0) + 1)
            acc
          },
          (left, right) => {
            right.foreach { case (word, cnt) =>
              left.update(word, left.getOrElse(word, 0) + cnt)
            }
            left
          }
        )
      println(counts)
    } finally {
      // Release the SparkContext even if reading or aggregating fails.
      context.stop()
    }
  }
}
