package com.shujia.core

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

/**
 * Demo: counting students per gender with `groupByKey`.
 *
 * Reads `Spark/data/students.txt` (CSV lines; column index 3 is the gender),
 * maps each line to a (gender, 1) pair, groups by key, and prints
 * "gender,count" for each group.
 */
object Demo09GroupByKey {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
    conf.setAppName("Demo09GroupByKey")
    conf.setMaster("local")

    val sc: SparkContext = new SparkContext(conf)
    try {
      // Count the number of students per gender.
      val stuLineRDD: RDD[String] = sc.textFile("Spark/data/students.txt")

      // Turn the data into KV format: gender as the key, 1 as the value.
      // NOTE(review): assumes every line has at least 4 comma-separated
      // fields — a malformed line would throw ArrayIndexOutOfBoundsException.
      val genderKVRDD: RDD[(String, Int)] = stuLineRDD.map(line => (line.split(",")(3), 1))

      /**
       * groupByKey: a transformation.
       * Groups the KV-format RDD directly by key, collecting all values that
       * belong to the same key into a single Iterable.
       *
       * NOTE(review): groupByKey shuffles every value across the network and
       * materializes all of them per key; for a plain count, reduceByKey(_ + _)
       * (or countByKey) would be far more efficient. Kept here because this
       * demo exists specifically to illustrate groupByKey.
       */
      val groupRDD: RDD[(String, Iterable[Int])] = genderKVRDD
        .groupByKey()

      groupRDD
        .map(kv => s"${kv._1},${kv._2.size}")
        .foreach(println)
    } finally {
      // Always release the SparkContext — the original demo never stopped it,
      // leaking the driver's resources until JVM exit.
      sc.stop()
    }
  }

}
