package cn.doitedu.day04

import cn.doitedu.day01.utils.SparkUtil
import cn.doitedu.day03.beans.Student
import org.apache.spark.rdd.RDD

/**
 * @Date 22.4.1
 * @Created by HANGGE
 * @Description
 */
object C07_转换算子_GroupByKey {

  /**
   * Demonstrates the `groupByKey` transformation by computing the average
   * score per grade from `data/students.csv`.
   *
   * Expected CSV columns (per [[Student]] construction below):
   * id, name, age, gender, score, grade
   * NOTE(review): column order is assumed from the constructor call — confirm
   * against the actual Student case class definition.
   */
  def main(args: Array[String]): Unit = {
    val sc = SparkUtil.getSc
    try {
      val rdd1 = sc.textFile("data/students.csv")

      // Parse each CSV line into a Student bean.
      val rdd2 = rdd1.map(line => {
        val arr = line.split(",")
        Student(arr(0).toInt, arr(1), arr(2).toInt, arr(3), arr(4).toDouble, arr(5))
      })

      // rdd2 is not a key-value RDD, so no *ByKey operator is available on it;
      // key each student by grade first.
      val rdd3 = rdd2.map(stu => (stu.grade, stu))

      // Group all students that share the same grade (key).
      val rdd4 = rdd3.groupByKey()

      // Compute sum and count in a single pass over each group instead of
      // materializing an intermediate List of scores.
      rdd4.foreach(tp => {
        val grade = tp._1
        val (sum, count) = tp._2.foldLeft((0.0, 0)) {
          case ((s, c), stu) => (s + stu.score, c + 1)
        }
        println(grade + "-的均分是-" + sum / count)
      })
    } finally {
      // Release the SparkContext so the application terminates cleanly.
      sc.stop()
    }
  }

}
