package com.shujia.spark.core

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

object Demo26Student {

  /**
   * Finds the students who passed every course.
   *
   * Input files (column layout inferred from the indices used below — verify against data):
   *   - data/score.txt  : studentId,courseId,score
   *   - data/cource.txt : courseId,<unused>,fullScore   (only columns 0 and 2 are read)
   *
   * A course counts as passed when score >= 60% of that course's full score.
   * Prints one (studentId, passedCourseCount) pair per fully-passing student.
   */
  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf()
      .setMaster("local")
      .setAppName("student")

    val sc = new SparkContext(conf)

    val scoreRDD: RDD[String] = sc.textFile("data/score.txt")
    val courceRDD: RDD[String] = sc.textFile("data/cource.txt")

    // score.txt line -> (courseId, (studentId, score)) keyed by course for the join below
    val scoreKVRDD: RDD[(String, (String, Double))] = scoreRDD.map(line => {
      val split: Array[String] = line.split(",")
      (split(1), (split(0), split(2).toDouble))
    })

    // cource.txt line -> (courseId, fullScore)
    val counrceKVRDD: RDD[(String, Double)] = courceRDD.map(line => {
      val split: Array[String] = line.split(",")
      val couId: String = split(0)
      val sumSco: Double = split(2).toDouble
      (couId, sumSco)
    })

    // Derive the number of courses from the data instead of hard-coding it,
    // so the job stays correct when the course catalogue changes.
    val courseCount: Long = counrceKVRDD.count()

    // Attach each score to its course's full score:
    // (courseId, ((studentId, score), fullScore))
    val joinRDD: RDD[(String, ((String, Double), Double))] = scoreKVRDD.join(counrceKVRDD)

    // (studentId, 1) when the course is passed (>= 60% of full score), else (studentId, 0)
    val kvRDD: RDD[(String, Int)] = joinRDD.map {
      case (_, ((id, score), fullScore)) =>
        if (score < fullScore * 0.6) (id, 0) else (id, 1)
    }

    // Count the number of passed courses per student.
    val countRDD: RDD[(String, Int)] = kvRDD.reduceByKey(_ + _)

    // Keep only students who passed every course.
    val jigeStu: RDD[(String, Int)] = countRDD.filter(_._2 == courseCount)

    jigeStu.foreach(println)

    // Release the SparkContext's resources (was missing in the original).
    sc.stop()
  }
}
