package com.shujia.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Demo8ReduceByKey {
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setMaster("local").setAppName("Demo8ReduceByKey")

    val sc = new SparkContext(conf)

    try {
      /**
        * reduceByKey is only available on key-value (pair) RDDs.
        * It aggregates the values per key using the supplied binary function.
        */
      val linesRDD: RDD[String] = sc.textFile("spark/data/words.txt")

      // Split each comma-separated line into individual words.
      val wordsRDD: RDD[String] = linesRDD.flatMap(line => line.split(","))

      // Pair each word with an initial count of 1.
      val kvRDD: RDD[(String, Int)] = wordsRDD.map(word => (word, 1))

      // Word count: sum the 1s per key, yielding (word, count) directly.
      val countRDD: RDD[(String, Int)] = kvRDD.reduceByKey(_ + _)

      countRDD.foreach(println)
    } finally {
      // Always release the SparkContext so executor/driver resources are freed
      // even if a job above fails. The original code never stopped the context.
      sc.stop()
    }
  }
}
