package com.hefei

import org.apache.commons.lang3.StringUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @author caizhengjun
 * @date 2020/8/27 8:56 PM
 */
object IPCount {

  /**
   * Counts occurrences of each client IP in an access log and prints the
   * most- and least-frequent IPs with their counts.
   *
   * Reads "dataset/access_log_sample.txt", takes the first whitespace-separated
   * token of each line as the IP, aggregates counts with reduceByKey, then runs
   * two actions (max and min by count) over the aggregated RDD.
   */
  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf().setAppName(this.getClass.getSimpleName).setMaster("local[2]")

    val sc: SparkContext = new SparkContext(conf)

    val source: RDD[String] = sc.textFile("dataset/access_log_sample.txt")

    // First whitespace-separated token of each log line is the client IP.
    val ips: RDD[String] = source.map(_.split(" ")(0))

    // Cache the aggregated RDD: it is consumed by TWO actions below, so
    // persisting here avoids recomputing the whole lineage (including the
    // reduceByKey shuffle) twice.
    // Fix: the original persisted `ips` (never released) and called
    // unpersist() on the never-persisted `interimRDD` *before* any action
    // ran, so the cache was never actually exploited and `ips` leaked.
    val interimRDD: RDD[(String, Int)] = ips.filter(StringUtils.isNotBlank(_)).map((_, 1)).reduceByKey(_ + _).persist()

    // Fetch each extremum directly: max/min with an Ordering on the count is
    // a single O(n) pass, instead of a full O(n log n) sortBy shuffle per query.
    val byCount: Ordering[(String, Int)] = Ordering.by(_._2)
    val maxTuple: (String, Int) = interimRDD.max()(byCount)
    val minTuple: (String, Int) = interimRDD.min()(byCount)

    println(s"访问最多的IP为: ${maxTuple._1},次数为:${maxTuple._2}\n访问最少的IP为: ${minTuple._1},次数为:${minTuple._2}")

    // Release the cache only after both actions have consumed it.
    interimRDD.unpersist()

    // Shut the context down cleanly (the original never stopped it).
    sc.stop()
  }

}
