package com.shujia.core

import com.shujia.core.Demo10Join.Student
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

object Demo12GroupByKey {

  /**
    * Demonstrates two equivalent ways of counting students per class:
    *   1. `RDD.groupBy` on the whole `Student` record, keyed by class name.
    *   2. Building a KV RDD `(clazz, 1)` and using `groupByKey`.
    *
    * Reads `Spark/data/students.txt` (CSV: id,name,age,gender,clazz) and
    * prints `clazz,count` lines for each approach.
    */
  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf()
    conf.setAppName("Demo12GroupByKey")
    conf.setMaster("local")

    val sc: SparkContext = new SparkContext(conf)

    try {
      // 读取学生数据及分数数据 并将每一行数据转换成样例类对象
      // Parse each CSV line into a Student case class.
      val stuRDD: RDD[Student] = sc
        .textFile("Spark/data/students.txt")
        .map(line => {
          val splits: Array[String] = line.split(",")
          val id: String = splits(0)
          val name: String = splits(1)
          val age: Int = splits(2).toInt
          val gender: String = splits(3)
          val clazz: String = splits(4)
          Student(id, name, age, gender, clazz)
        })

      // stuRDD feeds two independent actions below; without caching the
      // text file would be re-read and re-parsed for each action.
      stuRDD.cache()

      // 统计班级人数 — count students per class via groupBy on the full record.
      stuRDD
        .groupBy(stu => stu.clazz)
        .map(kv => s"${kv._1},${kv._2.size}")
        .foreach(println)

      // 将stuRDD变成KV格式 使用groupByKey进行分组
      // NOTE: for a pure count, reduceByKey/countByKey would avoid shuffling
      // every value; groupByKey is kept here only to demonstrate the operator.
      stuRDD
        .map(stu => (stu.clazz, 1))
        .groupByKey()
        .map(kv => s"${kv._1},${kv._2.size}")
        .foreach(println)
    } finally {
      // Fix: the original never stopped the SparkContext, leaking the local
      // Spark runtime (scheduler threads, UI port, shuffle/temp directories).
      sc.stop()
    }
  }

}
