package com.catmiao.spark.rdd.operator.transform

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @title: RDD_Operator_21_transform_last
 * @projectName spark_study
 * @description: Computes, for each province in datas/agent.log, the top-3 ads by click count.
 * @author ChengMiao
 * @date 2024/2/2 11:48
 */
object RDD_Operator_21_transform_last {

  /**
   * Reads `datas/agent.log`, where each line has the format
   * `timestamp province city user ad` (e.g. `1516609143867 6 7 64 16`),
   * and prints the top-3 ads by click count for every province.
   *
   * Output shape per record: `(province, List[(province, ad, clickCount)])`.
   */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("rdd")
    val sparkContext = new SparkContext(sparkConf)

    // Ensure the context is stopped even if the job throws.
    try {
      val lines: RDD[String] = sparkContext.textFile("datas/agent.log")

      // Key each click by (province, ad) with a count of 1 so the counts can
      // be aggregated with reduceByKey. Unlike the groupBy approach, this
      // performs map-side combining and only shuffles partial sums instead of
      // every raw record.
      val pairClicks: RDD[((String, String), Int)] = lines.map { line =>
        val fields = line.split(" ")
        ((fields(1), fields(4)), 1)
      }

      // ((province, ad), totalClicks)
      val clickCounts: RDD[((String, String), Int)] = pairClicks.reduceByKey(_ + _)

      // Re-key by province alone, keeping the full (province, ad, count)
      // triple as the value so the printed output keeps its original shape.
      val byProvince: RDD[(String, Iterable[(String, String, Int)])] =
        clickCounts
          .map { case ((province, ad), count) => (province, (province, ad, count)) }
          .groupByKey()

      // For each province, keep only the 3 ads with the highest click counts.
      val result: RDD[(String, List[(String, String, Int)])] = byProvince.mapValues(
        _.toList.sortBy(_._3)(Ordering.Int.reverse).take(3)
      )

      result.collect().foreach(println)
    } finally {
      sparkContext.stop()
    }
  }

}
