package com.dxf.bigdata.D04_spark

import java.util

import org.apache.commons.logging.{Log, LogFactory}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 *
 *
 *
 */
/**
 * Case study: for each province in datas/agent.log, find the top-3 ads by click count.
 *
 * Input line format (space-separated): timestamp province city user ad
 * Pipeline: ((province, ad), 1) -> reduceByKey -> (province, (ad, sum))
 *           -> groupByKey -> top 3 per province by descending sum.
 */
object T31_案例实操 {
  // BUGFIX: previously fetched the logger for T08_RDD使用_从日志中读取uri (copy-paste error),
  // which mislabeled log output; use this object's own class instead.
  private val log: Log = LogFactory.getLog(this.getClass)

  def main(args: Array[String]): Unit = {

    // TODO: set up the Spark environment (local mode, all cores)
    val sparkConf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("RDD")
    val sc = new SparkContext(sparkConf)

    // TODO: create the RDD for the case study
    val rddLine: RDD[String] = sc.textFile("datas/agent.log")

    // Map each log line to ((province, ad), 1).
    // words(1) = province, words(4) = ad id per the agent.log layout.
    val rddMap: RDD[((String, String), Int)] = rddLine.map(line => {
      val words: Array[String] = line.split(" ")
      ((words(1), words(4)), 1)
    })

    // Sum clicks per (province, ad) pair.
    val reduceRDD: RDD[((String, String), Int)] = rddMap.reduceByKey(_ + _)

    // Re-key by province: (province, (ad, sum))
    val mapRdd2: RDD[(String, (String, Int))] = reduceRDD.map(x => {
      (x._1._1, (x._1._2, x._2))
    })

    // Group all (ad, sum) pairs under their province:
    // (province, [(ad, sum), (ad, sum), ...])
    val rddGroup: RDD[(String, Iterable[(String, Int)])] = mapRdd2.groupByKey()

    // Within each province, keep the 3 ads with the highest counts
    // (descending by sum via a reversed Int ordering).
    val resultRDD: RDD[(String, List[(String, Int)])] = rddGroup.mapValues(iter => {
      iter.toList.sortBy(_._2)(Ordering.Int.reverse).take(3)
    })
    resultRDD.collect().foreach(println)

    // TODO: tear down the Spark environment
    sc.stop()
  }
}
