package cn.rslee.java.demos.test

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

/** Demonstrates pair-RDD grouping and aggregation in Spark.
  *
  * Builds a word list, maps each word to a `(word, 1)` pair, then shows:
  *  - two equivalent ways of creating the pairs (placeholder vs. named lambda),
  *  - `groupByKey` producing `(word, Iterable[Int])`,
  *  - a word count via `groupByKey` + driver-side `sum`,
  *  - the preferred word count via `reduceByKey` (combines map-side,
  *    avoiding the full shuffle of per-key values that `groupByKey` incurs).
  */
object GroupByKey {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("GroupByKey").setMaster("local"))
    try {
      val list = List("hadoop", "spark", "hive", "spark")
      val rdd = sc.parallelize(list)

      // collect() brings results to the driver so println output is visible
      // regardless of where executors run (RDD.foreach prints on executors).
      println("test1-----")
      rdd.map((_, 1)).collect().foreach(println)

      println("test2-----")
      rdd.map(x => (x, 1)).collect().foreach(println)

      println("test3-----")
      val pairRdd = rdd.map(x => (x, 1))
      pairRdd.groupByKey().collect().foreach(println)

      println("test4-----")
      // Word count by summing each key's grouped values on the driver.
      pairRdd.groupByKey().collect().map(x => (x._1, x._2.sum)).foreach(println)

      println("test5-----")
      // Same word count, but reduceByKey aggregates before the shuffle.
      rdd.map((_, 1)).reduceByKey(_ + _).collect().foreach(println)
    } finally {
      // Always release the SparkContext, even if a job above fails.
      sc.stop()
    }
  }
}