package Count

import org.apache.commons.lang.StringUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

import scala.collection.mutable

object Top3 {

  /** Number of top products to keep per category. */
  private val TopN = 3

  /**
   * Reads a JSON payment log with `cid` (category id) and `pid` (product id)
   * fields, counts payments per (cid, pid) pair, and prints the top [[TopN]]
   * products of each category, computed two ways (bounded TreeSet vs.
   * group-then-sort).
   *
   * @param args optional; args(0) overrides the input path
   *             (defaults to the original hard-coded file).
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName(s"${this.getClass.getName}")
      .master("local[*]")
      .getOrCreate()

    // Input path: first CLI argument, falling back to the original hard-coded file.
    val inputPath = if (args.nonEmpty) args(0) else "D:\\asa\\dianshang\\pay - 副本.log"

    // ((cid, pid), count): payments per (category, product) pair.
    // Rows with a missing/empty cid or pid are dropped before aggregation.
    val clean: RDD[((String, String), Int)] = spark.read.json(inputPath).rdd
      .map(row => {
        val cid = row.getAs[String]("cid")
        val pid = row.getAs[String]("pid")
        ((cid, pid), 1)
      })
      .filter(x => StringUtils.isNotEmpty(x._1._1) && StringUtils.isNotEmpty(x._1._2))
      .reduceByKey(_ + _)
    clean.foreach(println)
    //((1,5ace32e357f58a36876e25be),4)
    //((4,5ad16c9a57f58a3305ac0306),3)
    //((4,5ace2f3c57f58a36876e25bb),7)
    //((1,5ace2f8157f58a36876e25bc),4)
    //((1,5ace323c57f58a36876e25bd),8)
    //((4,5ace2d7057f58a36876e25ba),7)

    // Descending by count, with (cid, pid) as tie-breakers. The original
    // ordering compared ONLY the count, so two products with equal counts
    // compared as equal and TreeSet.add silently dropped one of them.
    val byCountDesc: Ordering[((String, String), Int)] =
      Ordering.by(t => (-t._2, t._1._1, t._1._2))

    // Approach 1: per-category top-N via a bounded TreeSet — at most TopN
    // elements are retained per group, so memory stays O(TopN) per category.
    val topPerCategory = clean.groupBy(_._1._1).mapValues { it =>
      val treeSet = new mutable.TreeSet[((String, String), Int)]()(byCountDesc)
      it.foreach { x =>
        treeSet.add(x)
        if (treeSet.size > TopN) {
          // Under byCountDesc the last element has the lowest count: evict it.
          treeSet.remove(treeSet.last)
        }
      }
      treeSet.toList
    }
    // The original computed this and discarded it (foreach was commented out).
    topPerCategory.foreach(println)
    //(4,List(((4,5ace2d7057f58a36876e25ba),7), ((4,5ace2f3c57f58a36876e25bb),7), ((4,5ad16c9a57f58a3305ac0306),3)))
    //(1,List(((1,5ace323c57f58a36876e25bd),8), ((1,5ace2f8157f58a36876e25bc),4), ((1,5ace32e357f58a36876e25be),4)))

    // Approach 2: group by category, then sort each group's products and keep
    // the first TopN. NOTE: the original called `.take(3)` on the grouped RDD,
    // which takes three whole (category, iterable) GROUPS — not the top 3
    // products within each category.
    clean.groupBy(_._1._1)
      .mapValues(_.toList.sorted(byCountDesc).take(TopN))
      .foreach(println)

    spark.stop()
  }
}
