package RDD

import org.apache.commons.lang3.StringUtils
import org.apache.spark.{SparkConf, SparkContext}
import org.junit.Test


class AccessLogAgg {
  /**
   * Reads the access-log files under `D:\SP\Soft`, counts token occurrences,
   * and prints the 10 most frequent tokens in descending order.
   */
  @Test
  def ipAgg(): Unit = {
    // 1. Create the SparkContext (local mode, suitable for a unit test).
    val conf = new SparkConf().setMaster("local").setAppName("Soft_TopN")
    val sc = new SparkContext(conf)
    try {
      // 2. Read the source files.
      val sourceRDD = sc.textFile("D:\\SP\\Soft")
      // 3. Split each line into tokens and pair each token with a count of 1.
      //    NOTE(review): this splits on EVERY space, so it counts all tokens,
      //    not just the IP column — confirm this matches the log format.
      val tokenPairs = sourceRDD
        .flatMap(_.split(" "))
        .map((_, 1))
      // 4. Basic cleaning: drop empty tokens.
      val cleaned = tokenPairs.filter { case (token, _) => StringUtils.isNotEmpty(token) }
      // 5. Aggregate occurrence counts per token.
      val counts = cleaned.reduceByKey(_ + _)
      // 6. Sort by count, descending.
      val sorted = counts.sortBy(_._2, ascending = false)
      // 7. Take and print the top-10 result.
      sorted.take(10).foreach(println)
    } finally {
      // Always release the SparkContext — only one active context is allowed
      // per JVM, so leaking it would break any subsequent Spark test.
      sc.stop()
    }
  }
}
