package cn.whuc.spark.operator

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Demo_ProvAdvRank {
  /**
   * Ranks advertisements by click count within each province.
   *
   * Reads `input/agent.log`, one space-separated record per line:
   *   timestamp province city user ad
   *   e.g. "1516609143869 9 4 75 18"
   *
   * Prints, for each province, its list of (ad, clickCount) pairs sorted
   * by click count descending.
   */
  def main(args: Array[String]): Unit = {
    // 1. Create the SparkContext (local mode, all available cores).
    val sc: SparkContext = new SparkContext(
      new SparkConf()
        .setMaster("local[*]")
        // Fix: the original used a blank app name (" "), which makes the job
        // impossible to identify in the Spark UI / history server.
        .setAppName("Demo_ProvAdvRank")
    )

    // 2. Build the transformation pipeline.
    val datas: RDD[String] = sc.textFile("input/agent.log")

    val provAdvRDD1: RDD[(String, List[(String, Int)])] = datas
      .map { line =>
        // Emit ((province, ad), 1): one click per record.
        // Fields: 0=timestamp, 1=province, 2=city, 3=user, 4=ad.
        val fields: Array[String] = line.split(" ")
        ((fields(1), fields(4)), 1)
      }
      // Pre-aggregate clicks per (province, ad) before regrouping —
      // far less shuffle traffic than grouping raw records by province.
      .reduceByKey(_ + _)
      // Re-key by province so each province's (ad, count) pairs collect together.
      .map { case ((prov, ad), count) => (prov, (ad, count)) }
      .groupByKey()
      // Sort each province's ads by click count, highest first.
      .mapValues(_.toList.sortBy(_._2)(Ordering.Int.reverse))

    provAdvRDD1.collect().foreach(println)

    // 3. Release the Spark context and its resources.
    sc.stop()
  }
}
