package com.king.spark.rdd.transform

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @Author wdl
 * @Date 2022/11/22 16:34
 */
object Spark_RDD_Top3 {

  /**
   * Computes, per province, the top 3 advertisements by click count.
   *
   * Input: an agent log whose space-separated fields include the province at
   * index 1 and the advertisement id at index 4 (per the original indexing).
   *
   * @param args optional first element: path to the input log file; falls back
   *             to the original hard-coded path when absent.
   */
  def main(args: Array[String]): Unit = {

    // App name fixed: the original said "WordCount", a copy-paste leftover.
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("Spark_RDD_Top3")
    val sparkContext: SparkContext = new SparkContext(sparkConf)

    // Generalized: the input path may now be supplied on the command line.
    val inputPath: String =
      args.headOption.getOrElse("E:\\work\\big-data-2020\\spark-api\\input\\agent.log")

    try {
      val textRDD: RDD[String] = sparkContext.textFile(inputPath)

      // ((province, ad), 1) for every log line.
      val provinceAdOnes: RDD[((String, String), Int)] = textRDD.map { line =>
        val fields: Array[String] = line.split(" ")
        ((fields(1), fields(4)), 1)
      }

      // Sum clicks per (province, ad), then re-key by province alone.
      val adCountByProvince: RDD[(String, (String, Int))] =
        provinceAdOnes
          .reduceByKey(_ + _)
          .map { case ((province, ad), count) => (province, (ad, count)) }

      // Collect each province's ads and keep the 3 with the highest counts.
      val top3PerProvince: RDD[(String, List[(String, Int)])] =
        adCountByProvince
          .groupByKey()
          .mapValues(_.toList.sortBy(_._2)(Ordering.Int.reverse).take(3))

      top3PerProvince.collect().foreach(println)
    } finally {
      // Release the context even when the job throws (original leaked it on failure).
      sparkContext.stop()
    }
  }
}