package com.qingguo.homework

import org.apache.spark.{SparkConf, SparkContext}

object student4 {

  def main(args: Array[String]): Unit = {

    /**
      * Requirement 4: find the top-100 students whose scores are the most
      * "unbalanced" across subjects. (Output fields per the spec:
      * student id, name, class, subject, score.)
      *
      * Approach: each subject has a different full mark, so raw scores are
      * first normalized to [0, 1] by dividing by the subject's max score.
      * For each student we then compute the variance of the normalized
      * scores; a large variance means the student is strong in some
      * subjects and weak in others.
      */

    // Number of subjects per student, used as the denominator for the mean
    // and the variance. NOTE(review): assumes every student has exactly 6
    // score rows — confirm against the data; could be derived with a count.
    val SubjectCount = 6.0
    // The requirement asks for the top 100 (the original code took only 10).
    val TopN = 100

    val conf = new SparkConf().setMaster("local").setAppName("student4")
    val sc = new SparkContext(conf)

    try {
      // score.txt rows: studentId,subjectId,score
      val score = sc.textFile("Spark/data/score.txt")
      // cource.txt rows: subjectId,subjectName,maxScore
      val cource = sc.textFile("Spark/data/cource.txt")

      // (subjectId, (studentId, rawScore)) — tuples instead of the original
      // "_"-joined strings, which were fragile and re-split repeatedly.
      val scoreKvRDD = score.map { line =>
        val split = line.split(",")
        (split(1), (split(0), split(2)))
      }

      // (subjectId, maxScore) — subject name was parsed but never used in
      // the final computation, so only the max score is carried forward.
      val courceKvRDD = cource.map { line =>
        val split = line.split(",")
        (split(0), split(2))
      }

      // (studentId, normalizedScore): raw score divided by the subject's
      // full mark, joined on subjectId.
      val scoregyhRDD = courceKvRDD
        .join(scoreKvRDD)
        .map { case (_, (maxscore, (id, rawScore))) =>
          (id, rawScore.toDouble / maxscore.toDouble)
        }

      // (studentId, sum of normalized scores). The original also divided by
      // 1.00 twice, which was a no-op and is dropped here.
      val sumscoregyhRDD = scoregyhRDD.reduceByKey(_ + _)

      // (studentId, variance of normalized scores); larger = more lopsided.
      val varianceRDD = sumscoregyhRDD
        .join(scoregyhRDD)
        .map { case (id, (sum, normalized)) =>
          val avg = sum / SubjectCount
          val diff = normalized - avg
          (id, diff * diff)
        }
        .reduceByKey(_ + _)
        .mapValues(_ / SubjectCount)

      // Top-N student ids by descending variance, collected to the driver.
      // A Set gives O(1) membership tests in the filter closure below (the
      // original used Array.contains, O(n) per record).
      val topIds = varianceRDD
        .sortBy(-_._2)
        .take(TopN)
        .map(_._1)
        .toSet

      // Print every raw score row that belongs to a top-N student. The
      // original bound the Unit result of foreach to an unused val.
      score
        .filter(line => topIds.contains(line.split(",")(0)))
        .foreach(println)
    } finally {
      // Release local Spark resources even if the job throws (the original
      // never stopped the context).
      sc.stop()
    }
  }
}
