package Test

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object TestDemo2 {

  /**
   * Reads per-student records from `spark/data/score.txt` (CSV: column 0 is the
   * student id, column 2 a numeric score), computes each student's population
   * score variance, and prints the 10 students with the highest variance,
   * formatted to two decimal places.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local")
    conf.setAppName("map")
    val context = new SparkContext(conf)
    try {
      val src: RDD[String] = context.textFile("spark/data/score.txt")

      // (studentId, score) pairs, grouped by student.
      val base = src.map { line =>
        val sp = line.split(",")
        (sp(0), sp(2).toDouble)
      }.groupByKey()

      // Mean and population variance in a single pass over each group.
      // The original leftOuterJoin of `base` with its own averages forced an
      // extra shuffle (and a spurious Option) for data already co-located here.
      val res: RDD[(String, (List[Double], Double))] = base.mapValues { scores =>
        val list = scores.toList
        val mean = list.sum / list.size
        val variance = list.map(s => (s - mean) * (s - mean)).sum / list.size
        (list, variance)
      }

      // Top 10 students by variance, highest first.
      res
        .sortBy { case (_, (_, variance)) => -variance }
        .take(10)
        .map { case (id, (scores, variance)) => (id, scores, f"方差为$variance%.2f") }
        .foreach(println)
    } finally {
      // Release the SparkContext even if the job fails (the original leaked it).
      context.stop()
    }
  }

}
