package com.zhang.spark_2.com.zhang.core.transform

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Top-3 ads per province.
 *
 * Reads `data/agent.log`, whose space-separated columns are:
 * timestamp, province, city, user, adId.
 * For every province it prints the three most-clicked ads with their counts.
 *
 * @author zhang
 * @date 2022/2/12 16:29
 */
object Spark08_req {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("map")
    val sc = new SparkContext(conf)

    val data: RDD[String] = sc.textFile("data/agent.log")

    data
      // 1. Count clicks per (province, ad) pair.
      .map { line =>
        val fields = line.split(" ")
        ((fields(1), fields(4)), 1)
      }
      .reduceByKey(_ + _)
      // 2. Re-key by province so each province's (ad, count) pairs group together.
      .map { case ((province, ad), count) => (province, (ad, count)) }
      .groupByKey()
      // 3. Keep the three highest counts per province.
      //    mapValues (not map) preserves the partitioner established by groupByKey,
      //    so no extra shuffle is triggered if further keyed ops are added later.
      .mapValues(_.toList.sortBy(_._2)(Ordering.Int.reverse).take(3))
      .collect()
      .foreach(println)

    sc.stop()
  }
}
