package com.doitedu.test

import com.doitedu.utils.SparkUtil
import org.apache.spark.rdd.RDD

/**
 * @Date: 22.6.29 
 * @Author: HANGGE
 * @qq: 598196583
 * @Tips: 学大数据 ,到多易教育
 * @Description:
 */
object BoBo {

  /**
   * Spark job: print the highest score for each subject.
   *
   * Input: `data/stu/stu.txt`, one record per line in CSV form.
   * Assumed layout (from the original parsing): id,name,subject,score
   * — TODO confirm against the actual data file.
   *
   * Output: one `(subject, maxScore)` pair per subject, printed on the
   * executors via `foreach(println)`.
   */
  def main(args: Array[String]): Unit = {
    val sc = SparkUtil.getSparkContext("Demo")
    try {
      // Keep only the fields we need: (subject, score). The original code
      // also parsed id/name into the tuple, but they were never used.
      val subjectScores = sc.textFile("data/stu/stu.txt")
        .map { line =>
          val arr = line.split(",")
          (arr(2), arr(3).toDouble)
        }

      // reduceByKey combines partial maxima map-side before the shuffle.
      // The previous groupBy(_._3) + per-group max shipped every record
      // across the network just to discard all but one per subject.
      subjectScores
        .reduceByKey(_ max _)
        .foreach(println)
    } finally {
      // Release cluster resources even if the job throws.
      sc.stop()
    }
  }

}
