package com.spark.WorCount

import com.alibaba.fastjson.JSON
import org.apache.spark.{SparkConf, SparkContext}

object TestTopN {

  /**
   * Per-area top-3 ranking of anchors (uids) by total gold received.
   *
   * Input (local files, one JSON object per line):
   *   - datas/video_info.log  : {"uid": ..., "vid": ..., "area": ...}
   *   - datas/gift_record.log : {"vid": ..., "gold": ...}
   *
   * Pipeline: sum gold per vid -> join with video info -> sum gold per
   * (uid, area) -> group by area -> keep the 3 highest-gold uids per area.
   * Output is printed as: area<TAB>uid1:gold1,uid2:gold2,uid3:gold3
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("TestTopN")
      .setMaster("local")
    val sc = new SparkContext(conf)
    // Ensure the context is stopped even if the job throws
    // (original code leaked it on failure).
    try {
      // (vid, (uid, area)) — one record per video
      val videoRdd = sc.textFile("datas/video_info.log").map { line =>
        val json = JSON.parseObject(line)
        (json.getString("vid"), (json.getString("uid"), json.getString("area")))
      }

      // (vid, gold) — one record per gift event
      val giftRdd = sc.textFile("datas/gift_record.log").map { line =>
        val json = JSON.parseObject(line)
        (json.getString("vid"), Integer.parseInt(json.getString("gold")))
      }

      // Total gold per video.
      val goldPerVideo = giftRdd.reduceByKey(_ + _)

      // Join on vid, re-key by (uid, area), sum gold per anchor,
      // then re-key by area for the per-area grouping.
      val anchorsByArea = videoRdd
        .join(goldPerVideo)
        .map { case (_, ((uid, area), gold)) => ((uid, area), gold) }
        .reduceByKey(_ + _)
        .map { case ((uid, area), goldSum) => (area, (uid, goldSum)) }

      // For each area keep the 3 anchors with the most gold,
      // formatted as "uid:gold" joined by commas.
      val top3ByArea = anchorsByArea.groupByKey().map { case (area, anchors) =>
        val top3 = anchors.toList
          .sortBy(-_._2) // descending by gold in a single sort pass
          .take(3)
          .map { case (uid, gold) => s"$uid:$gold" }
          .mkString(",")
        (area, top3)
      }

      top3ByArea.foreach { case (area, top3) =>
        println(s"$area\t$top3")
      }
    } finally {
      sc.stop()
    }
  }
}
