package com.shujia.spark

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

object Demo8GroupBy {

  /**
    * Demonstrates two equivalent ways of grouping student records by class:
    *
    *  1. `groupBy` on a plain RDD, supplying a key-extraction function.
    *  2. `groupByKey` on a key-value RDD (only available on RDDs of pairs).
    *
    * Input: `spark/data/students.txt`, comma-separated lines where the
    * 5th field (index 4) is the student's class.
    */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
      .setMaster("local")
      // Name the app after the object (was "map", a copy-paste leftover)
      // so it is identifiable in the Spark UI.
      .setAppName("Demo8GroupBy")

    val sc = new SparkContext(conf)

    // Ensure the SparkContext is always stopped, releasing executors and
    // the UI port even if the job throws.
    try {
      val rdd1: RDD[String] = sc.textFile("spark/data/students.txt")

      /**
        * groupBy: groups elements by the key produced by the given function.
        * Here, students are grouped by their class (5th comma-separated field).
        */
      val rdd2: RDD[(String, Iterable[String])] = rdd1.groupBy(line => line.split(",")(4))

      //    rdd2.foreach(println)

      // Convert the RDD into key-value format: (class, full line).
      val rdd3: RDD[(String, String)] = rdd1.map(line => {
        val clazz: String = line.split(",")(4)
        (clazz, line)
      })

      /**
        * groupByKey: groups values by key, so students of the same class
        * end up in the same group. Only applicable to key-value RDDs.
        */
      val rdd4: RDD[(String, Iterable[String])] = rdd3.groupByKey()

      rdd4.foreach(println)
    } finally {
      sc.stop()
    }
  }
}
