package com._51doit.spark02

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

// Top-N teachers per subject (group by subject, rank teachers by click count)
object OtherWork {

  /**
   * Spark driver: reads an access log, counts clicks per (subject, teacher),
   * and prints the top 3 teachers for each subject.
   *
   * Each input line is assumed to look like "http://<subject>.<domain>/<teacher>"
   * (splitting on runs of '/' yields: scheme, host, teacher) — TODO confirm
   * against the actual teacher.log format.
   *
   * @param args optional; args(0) overrides the input path (defaults to the
   *             original hard-coded location for backward compatibility)
   */
  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(this.getClass.getSimpleName)
    val sc: SparkContext = new SparkContext(conf)

    // Generalized: accept the input path as the first program argument,
    // falling back to the original hard-coded path.
    val inputPath: String =
      if (args.nonEmpty) args(0) else "D:\\07spark\\spark-day02\\work\\teacher.log"

    val lines: RDD[String] = sc.textFile(inputPath)

    // Parse each line into ((subject, teacher), 1) for counting.
    val subjectTeacherOne: RDD[((String, String), Int)] = lines.map(line => {
      val parts: Array[String] = line.split("/+")
      val teacher: String = parts(2)
      // host is "<subject>.<rest>" — the subject is the first dotted segment
      val subject: String = parts(1).split("\\.")(0)
      ((subject, teacher), 1)
    })

    // Total clicks per (subject, teacher).
    val counted: RDD[((String, String), Int)] = subjectTeacherOne.reduceByKey(_ + _)

    // Group by subject, then keep the 3 highest-count teachers per subject.
    // NOTE: sortBy(-_._2) sorts DESCENDING — the previous ascending sort
    // returned the bottom 3 instead of the top 3.
    val top3PerSubject: RDD[(String, List[(String, Int)])] = counted
      .groupBy(_._1._1)
      .mapValues(group =>
        group
          .map({ case ((_, teacher), cnt) => (teacher, cnt) })
          .toList
          .sortBy(-_._2)
          .take(3)
      )

    // Print the final ranking (the previous version printed only an
    // intermediate result and left the final output commented out).
    top3PerSubject.foreach(println)

    // Release cluster resources.
    sc.stop()
  }
}
