package com.txl.cn.spark02

import org.apache.spark.rdd.RDD
import org.apache.spark.{Partitioner, SparkConf, SparkContext}

import scala.collection.mutable

/**
  * Created by txl on 2017/12/27.
  */
object TeacherFav2 {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("Teacher").setMaster("local[*]")
    val sc = new SparkContext(conf)
    val lines = sc.textFile(args(0))
    val res = lines.map({
      line =>
        val str: Array[String] = line.split("/")

        val url = str(2)
        val i = url.indexOf(".", 0)
        val subject = url.substring(0, i)
        val tName = str(3)
        (subject, tName)
    })
    val subs: Array[String] = res.map(t=>t._1).distinct().collect
    val zuhe: RDD[((String, String), Int)] = res.map((_,1))
    val data: RDD[((String, String), Int)] = zuhe.reduceByKey(_+_)
    val partitioned: RDD[((String, String), Int)] = data.partitionBy(new MyPartitioner(subs))
    val res2: RDD[((String, String), Int)] = partitioned.mapPartitions({
      t =>
       t.toList.sortBy(-_._2).take(2) iterator
    })
    res2.foreach(println)

  }
}
class MyPartitioner(subs: Array[String]) extends  Partitioner{
  val subMap=new mutable.HashMap[String,Int]()
  var i=0
  for(e <- subs){
    subMap(e)=i
    i=i+1
  }

  override def numPartitions: Int = subs.size


  override def getPartition(key: Any): Int = {
    val keyTrans: (String, String) = key.asInstanceOf[(String,String)]
    val sub = keyTrans._1
    subMap(sub)
  }
}