package com.neusoft.bd.myspark.spark01

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object M22Agent {

  /**
   * For each province, computes the top-3 advertisements by click count.
   *
   * Reads `input/agent.log`, where each space-separated line is:
   * timestamp, province, city, user, ad.
   * Prints one `(province, List((ad, count), ...))` tuple per province.
   */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("Operator")
    val sc: SparkContext = new SparkContext(conf)
    try {
      val adsByProvince: RDD[(String, Iterable[(String, Int)])] = sc.textFile("input/agent.log")
        .map { line =>
          val words: Array[String] = line.split(" ")
          // Key on (province, ad); each log line counts as one click.
          ((words(1), words(4)), 1)
        }
        // Sum the click counts for identical (province, ad) keys.
        .reduceByKey(_ + _)
        // Re-key by province so all of a province's ads group together.
        .map {
          case ((prv, ad), sum) => (prv, (ad, sum))
        }
        .groupByKey()

      adsByProvince
        .mapValues { iter =>
          // Sort each province's ads by descending count and keep the top 3.
          iter
            .toList
            .sortBy(_._2)(Ordering.Int.reverse)
            .take(3)
        }
        .collect()
        .foreach(println)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }
}
