package com.txl.cn.spark02

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by txl on 2017/12/27.
  */
/**
  * Computes the top-2 most-visited teachers per subject from an access log.
  *
  * Expected input line format: `http://<subject>.<domain>/<teacher>`
  * (splitting on "/" yields ["http:", "", "<subject>.<domain>", "<teacher>"]).
  *
  * Usage: TeacherFav <input-path>
  */
object TeacherFav {
  def main(args: Array[String]): Unit = {
    require(args.nonEmpty, "Usage: TeacherFav <input-path>")

    val conf = new SparkConf().setAppName("Teacher").setMaster("local[*]")
    val sc = new SparkContext(conf)
    try {
      val lines = sc.textFile(args(0))

      // Parse each line into ((subject, teacherName), 1) for counting.
      val subjectTeacherOne: RDD[((String, String), Int)] = lines.map { line =>
        val parts = line.split("/")
        val host = parts(2)                             // e.g. "bigdata.edu360.cn"
        val subject = host.substring(0, host.indexOf('.'))
        val teacher = parts(3)
        ((subject, teacher), 1)
      }

      // Total visits per (subject, teacher).
      val counts: RDD[((String, String), Int)] = subjectTeacherOne.reduceByKey(_ + _)

      // BUG FIX: the original globally sorted the RDD and then called groupBy,
      // but a groupBy shuffle does not preserve the pre-shuffle sort order
      // within each group — the subsequent take(2) could return arbitrary
      // teachers. Sort INSIDE each group instead, then take the top 2.
      val topTwoPerSubject: RDD[(String, List[(String, Int)])] =
        counts
          .groupBy { case ((subject, _), _) => subject }
          .mapValues { group =>
            group.toList
              .sortBy { case (_, visits) => -visits }   // descending by visit count
              .take(2)
              .map { case ((_, teacher), visits) => (teacher, visits) }
          }

      // collect() brings the (small) result to the driver so println output
      // appears here rather than on executors.
      topTwoPerSubject.collect().foreach(println)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }
}
