package com.xzx.spark.core.transform

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 *
 * Demonstrates the groupByKey transformation: groups the elements of a
 * key-value RDD by key, collecting all values for a key into one Iterable.
 *
 * @author xinzhixuan
 * @version 1.0
 * @date 2021-06-26 8:39 PM
 */
object Spark017_KeyValue_GroupByKey {

  /**
   * Entry point: groups a small list of words by their first character
   * using the `groupByKey` transformation, then prints the result.
   */
  def main(args: Array[String]): Unit = {
    // Run locally on all available cores for this standalone example.
    val sparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("Spark017_KeyValue_GroupByKey")
    val sc = new SparkContext(sparkConf)

    // Turn each word into a (firstChar, word) pair, then collect all words
    // sharing the same first character into a single Iterable per key.
    val words: RDD[String] = sc.makeRDD(List("Hello", "Scala", "Spark", "Hadoop"))
    val pairs: RDD[(Char, String)] = words.map(word => (word.charAt(0), word))
    val grouped: RDD[(Char, Iterable[String])] = pairs.groupByKey()

    grouped.collect().foreach(println)

    sc.stop()
  }
}
