package com.darrenchan.spark.rdd

import org.apache.spark.{SparkConf, SparkContext}

/**
  * Implements the same word-count result as reduceByKey, but using groupByKey.
  */
object GroupByKey {
  def main(args: Array[String]): Unit = {
    // Run locally with 2 threads; the app name shows up in the Spark UI.
    val sparkConf = new SparkConf().setMaster("local[2]").setAppName("test")
    val sc = new SparkContext(sparkConf)

    // Split each line into words and pair every word with a count of 1.
    val groupRDD = sc.parallelize(List("hello spark", "hello world", "hello world")).
      flatMap(_.split(" ")).
      map((_, 1))

    // groupByKey collects all values per key, e.g.
    // (hello,CompactBuffer(1, 1, 1)) (world,CompactBuffer(1, 1)) (spark,CompactBuffer(1))
    // NOTE(review): groupByKey shuffles every value across the network; for a
    // plain sum, reduceByKey combines map-side first and is cheaper — kept here
    // because demonstrating groupByKey is the point of this example.
    val rdd = groupRDD.groupByKey()

    // Sum each key's values. mapValues preserves the partitioner, and the
    // Iterable sums directly — no need to materialize it into a List first.
    val rdd2 = rdd.mapValues(_.sum)

    println(rdd2.collect().mkString(" "))

    sc.stop()
  }
}
