package com.weic.spark.scala.hw

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

import scala.collection.mutable

/**
 * @Author:BigData-weic
 * @ClassName:_03Work
 * @Date:2020/12/14 20:49
 * @Description: Computes the top-3 scoring students per subject with combineByKey
 * @Version:1.0
 */
object _03Work {

	/** Number of top records kept per subject. */
	private val TopN = 3

	/**
	 * Ordering used inside each per-subject TreeSet: descending by score.
	 *
	 * Ties are broken by comparing the whole "name|score" string: a TreeSet
	 * treats elements that compare equal (compare == 0) as duplicates, so a
	 * score-only comparison would silently drop students sharing a score.
	 */
	private val scoreDescOrdering: Ordering[String] = new Ordering[String] {
		override def compare(x: String, y: String): Int = {
			// Values are encoded as "name|score"; compare numeric score first.
			val left = x.split("\\|")(1).toDouble
			val right = y.split("\\|")(1).toDouble
			val byScore = right.compareTo(left)
			if (byScore != 0) byScore else x.compareTo(y)
		}
	}

	/**
	 * Entry point: reads "subject \t name \t score" lines and prints the
	 * top-3 students per subject. combineByKey keeps at most TopN entries
	 * per key on each partition, so full groups are never shuffled.
	 */
	def main(args: Array[String]): Unit = {
		val conf = new SparkConf()
			.setAppName("_03Work")
			.setMaster("local[*]")
		val sc = new SparkContext(conf)
		// NOTE(review): hard-coded Windows path; consider taking it from args.
		val lines: RDD[String] = sc.textFile("file:\\F:\\datas\\work12_14\\topn.txt")
		val scoreInfoRDD: RDD[(String, String)] = lines.map(line => {
			val strs = line.split("\t")
			// Key by subject; pack name and score into one "name|score" value.
			(strs(0), strs(1) + "|" + strs(2))
		})

		// Per-key top-N: each combiner is a TreeSet bounded to TopN entries.
		val groupTop3RDD: RDD[(String, mutable.TreeSet[String])] =
			scoreInfoRDD.combineByKey(createCombiner, mergeValue, mergeCombiner)

		// foreach runs on executors; with local[*] the output reaches this
		// console, but the ordering of subjects is nondeterministic.
		groupTop3RDD.foreach { case (subject, infos) =>
			println(s"$subject\t${infos.mkString("[", ",", "]")}")
		}

		// Release cluster resources.
		sc.stop()
	}

	/** Seeds a per-partition combiner with the first "name|score" value of a key. */
	def createCombiner(info: String): mutable.TreeSet[String] =
		mutable.TreeSet(info)(scoreDescOrdering)

	/** Folds one more value into a combiner, trimming it back to TopN entries. */
	def mergeValue(infos: mutable.TreeSet[String], info: String): mutable.TreeSet[String] = {
		infos.add(info)
		// dropRight builds a NEW TreeSet without the lowest-scoring entries;
		// the returned (trimmed) set is what combineByKey carries forward.
		if (infos.size > TopN) infos.dropRight(infos.size - TopN) else infos
	}

	/** Merges two partial combiners from different partitions, keeping TopN. */
	def mergeCombiner(infos: mutable.TreeSet[String], infos1: mutable.TreeSet[String]): mutable.TreeSet[String] = {
		infos1.foreach(infos.add)
		if (infos.size > TopN) infos.dropRight(infos.size - TopN) else infos
	}

}
