package com.csw.spark

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Demo08GroupBy {
  /**
    * Demo comparing `groupBy` and `groupByKey` on a student dataset.
    *
    * Input: `spark/data/students.txt`, one comma-separated record per line;
    * column index 4 holds the class name (assumed from the split below —
    * verify against the data file).
    */
  def main(args: Array[String]): Unit = {
    // Local-mode configuration so the demo runs without a cluster.
    val conf: SparkConf = new SparkConf()
      .setAppName("groupBy")
      .setMaster("local")

    val sc: SparkContext = new SparkContext(conf)

    val rdd1: RDD[String] = sc.textFile("spark/data/students.txt")

    /**
      * groupBy: group elements by a key derived from each element (here the
      * class column). Works on any RDD, not just key-value RDDs.
      */
    val rdd2: RDD[(String, Iterable[String])] = rdd1.groupBy(i => i.split(",")(4))

    //    rdd2.foreach(println)

    // Convert the RDD into key-value (class, record) format.
    val rdd3: RDD[(String, String)] = rdd1.map(i => {
      val clazz: String = i.split(",")(4)
      (clazz, i)
    })
    //    rdd3.foreach(println)

    /**
      * groupByKey: group by the key of a key-value RDD, so students of the
      * same class end up in the same group. Only usable on key-value RDDs.
      */
    val rdd4: RDD[(String, Iterable[String])] = rdd3.groupByKey()

    rdd4.foreach(println)

    // Fix: the original never stopped the context, leaking driver resources
    // and preventing a clean shutdown of the application.
    sc.stop()
  }
}
