package com.xzx.spark.core.wordcount

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Word-count example implemented with RDD.combineByKey:
 * reads a text file, splits lines into words, and aggregates
 * per-word counts via createCombiner / mergeValue / mergeCombiners.
 *
 * @author xinzhixuan
 * @version 1.0
 * @date 2021-06-26 8:39 下午
 */
object Spark_WordCount_CombineByKey {
  /**
   * Entry point: counts word occurrences in a text file using `combineByKey`.
   *
   * @param args optional; args(0) overrides the input path
   *             (defaults to "input/wc.txt", the original hard-coded value)
   */
  def main(args: Array[String]): Unit = {
    val inputPath = args.headOption.getOrElse("input/wc.txt")

    // FIX: appName previously said "AggregateByKey" — a copy-paste from a sibling example.
    val conf = new SparkConf().setMaster("local[*]").setAppName("Spark_WordCount_CombineByKey")
    val context = new SparkContext(conf)
    try {
      val mapRDD: RDD[(String, Int)] =
        context.textFile(inputPath).flatMap(_.split(" ")).map((_, 1))

      // FIX: createCombiner must seed the accumulator with the first value itself
      // (v => v), not the constant 1. The original `(v: Int) => 1` only produced
      // correct counts because every mapped value happens to be 1; any other
      // weighting would silently yield wrong results.
      val wordCounts = mapRDD.combineByKey(
        (v: Int) => v,                         // createCombiner: first value seen for a key
        (acc: Int, v: Int) => acc + v,         // mergeValue: fold a value into a partition-local acc
        (acc1: Int, acc2: Int) => acc1 + acc2  // mergeCombiners: merge accs across partitions
      )

      // FIX: println(rdd) printed only the RDD's toString; materialize and print the counts.
      wordCounts.collect().foreach(println)
    } finally {
      // Ensure the SparkContext is released even if the job throws.
      context.stop()
    }
  }
}
