package day04

import java.net.URL

import org.apache.spark.rdd.RDD
import org.apache.spark.{Partitioner, SparkConf, SparkContext}

import scala.collection.mutable

object subjectModoleCount3 {

  /** Spark driver: counts hits per URL in an access log, re-keys each URL by
    * its host ("subject"), repartitions with a custom per-subject partitioner,
    * and writes the top-3 most-hit URLs of every subject to local disk.
    */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setAppName("subjectModuleCount3").setMaster("local[2]")
    val sc = new SparkContext(conf)

    // Each log line is tab-separated; field 1 is the requested URL.
    // Aggregate to (url, hitCount) and cache — it is consumed twice below.
    val logs: RDD[String] = sc.textFile("D://视频资料/大数据/spark/Day04/access.txt")
    val urlCounts: RDD[(String, Int)] = logs
      .map(_.split("\t")(1))
      .map(u => (u, 1))
      .reduceByKey(_ + _)
      .cache()

    // Re-key as (subject, (url, count)) — the subject (URL host) must be the
    // key so the custom partitioner can route records by it. Cached because it
    // feeds both the distinct-keys collect and the partitionBy.
    val subjectInfo: RDD[(String, (String, Int))] = urlCounts.map { case (u, cnt) =>
      (new URL(u).getHost, (u, cnt))
    }.cache()

    // The distinct subjects define the partitioner: one partition per subject.
    val subjects: Array[String] = subjectInfo.keys.distinct().collect()
    val partitioned: RDD[(String, (String, Int))] =
      subjectInfo.partitionBy(new MySubjectPartition(subjects))

    // Each partition now holds a single subject's records; keep its 3 highest
    // counts. mapPartitions works on iterators, so materialize to a List for
    // sorting, then hand back an iterator.
    val res: RDD[(String, (String, Int))] = partitioned.mapPartitions { records =>
      records.toList.sortBy { case (_, (_, cnt)) => cnt }.reverse.take(3).iterator
    }

    // Save to local disk — the output directory must NOT exist beforehand.
    res.saveAsTextFile("D://aaa/output")
  }

}

//Custom partitioner: extend Partitioner and override numPartitions and getPartition.
//It assigns one partition per subject, in the order subjects first appear.
class MySubjectPartition(subjects: Array[String]) extends Partitioner {
  // Map each subject to its partition index, in order of appearance.
  // Built immutably with zipWithIndex instead of the original mutable
  // HashMap plus a manually incremented `var` counter.
  private val subjectToPartition: Map[String, Int] = subjects.zipWithIndex.toMap

  /** Number of partitions: exactly one per distinct subject. */
  override def numPartitions: Int = subjects.length

  /** Partition index for `key`; unknown (or null) keys fall back to
    * partition 0. String.valueOf is used instead of key.toString so a
    * null key cannot throw a NullPointerException.
    */
  override def getPartition(key: Any): Int =
    subjectToPartition.getOrElse(String.valueOf(key), 0)
}

