package day04.demo

import org.apache.spark.{SparkConf, SparkContext}

/**
 * @Author wsl
 * @Description Compute the top-3 advertisements (by click count) for each province.
 *
 * Expected input (space-separated fields per line in agent.txt):
 *   timestamp province city user ad
 * NOTE(review): field(1) is treated as the province key per the description —
 * confirm against the actual agent.txt layout.
 */
object TopN_Ad {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setAppName("rdd").setMaster("local[*]")
    val sc = new SparkContext(conf)

    sc.textFile("sparkcore/input/agent.txt")
      .map { line =>
        val fields: Array[String] = line.split(" ")
        // Key on a (province, ad) tuple instead of a "province-ad" string:
        // the string key breaks if either value itself contains the "-" separator,
        // and the tuple avoids the concat/re-split round trip entirely.
        ((fields(1), fields(4)), 1)
      }
      .reduceByKey(_ + _)                          // click count per (province, ad)
      .map { case ((province, ad), count) => (province, (ad, count)) }
      .groupByKey()                                // gather all (ad, count) pairs per province
      .mapValues(_.toList.sortBy(-_._2).take(3))   // keep the 3 ads with the highest counts
      .collect()
      .foreach(println)

    sc.stop()
  }
}
