package com.weic.spark.scala.p3.sotrt

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable

/**
 * @Author:BigData-weic
 * @ClassName:GroupTopN
 * @Date:2020/12/8 19:32
 * @Description: Computes the top-3 scores per subject (grouped Top-N) with Spark combineByKey.
 * @Version:1.0
 */
object GroupTopN {

	/**
	 * Orders "name|score" strings by score, descending. Ties are broken by
	 * the full string: a TreeSet drops elements its Ordering reports as
	 * equal, so comparing scores alone (as the original code did) would
	 * silently discard a student whose score equals another's.
	 */
	private val byScoreDesc: Ordering[String] = new Ordering[String] {
		override def compare(x: String, y: String): Int = {
			val left = x.split("\\|")(1).toDouble
			val right = y.split("\\|")(1).toDouble
			val cmp = right.compareTo(left)
			if (cmp != 0) cmp else x.compareTo(y)
		}
	}

	/**
	 * Entry point: parses "subject name score" lines, groups by subject,
	 * keeps the top 3 scores per subject via combineByKey, and prints each
	 * group.
	 *
	 * @param args unused command-line arguments
	 */
	def main(args: Array[String]): Unit = {
		val conf = new SparkConf()
			.setAppName("GroupTopN")
			.setMaster("local[*]")
		// Create the Spark context (local runtime here).
		val sc = new SparkContext(conf)
		val scoreRDD = sc.parallelize(List(
			"chinese ls 91",
			"english ww 56",
			"chinese zs 90",
			"chinese zl 76",
			"english zq 88",
			"chinese wb 95",
			"chinese sj 74",
			"english ts 87",
			"english ys 67",
			"english mz 77",
			"chinese yj 98",
			"english gk 96"
		))
		// Parse each whitespace-separated line into (subject, "name|score").
		val scoreInfoRDD: RDD[(String, String)] = scoreRDD.map(line => {
			val fields = line.split("\\s+")
			val subject = fields(0)
			val name = fields(1)
			val score = fields(2)
			(subject, name + "|" + score)
		})

		// Group by subject, keeping only the top 3 entries per key.
		val groupTop3RDD: RDD[(String, mutable.TreeSet[String])] =
			scoreInfoRDD.combineByKey(createCombiner, mergeValue, mergeCombiner)

		// Print each subject with its top-3 entries, best score first.
		groupTop3RDD.foreach { case (subject, infos) =>
			println(s"$subject\t${infos.mkString("[", ",", "]")}")
		}

		// Release resources.
		sc.stop()
	}

	/**
	 * combineByKey step 1: seed the per-partition accumulator with the first
	 * value seen for a key, ordered by descending score.
	 */
	def createCombiner(info: String): mutable.TreeSet[String] =
		mutable.TreeSet(info)(byScoreDesc)

	/**
	 * combineByKey step 2: fold one more value into an accumulator, trimming
	 * the set back down to its 3 best entries. (The size-based trim matches
	 * mergeCombiner; the original's dropRight(1) relied on the set growing by
	 * exactly one element.)
	 */
	def mergeValue(infos: mutable.TreeSet[String], info: String): mutable.TreeSet[String] = {
		infos.add(info)
		if (infos.size > 3) infos.dropRight(infos.size - 3) else infos
	}

	/**
	 * combineByKey step 3: merge two per-partition accumulators, keeping only
	 * the 3 best entries overall.
	 */
	def mergeCombiner(infos: mutable.TreeSet[String], infos1: mutable.TreeSet[String]): mutable.TreeSet[String] = {
		infos1.foreach(infos.add)
		if (infos.size > 3) infos.dropRight(infos.size - 3) else infos
	}
}
